path: root/contrib/python/Jinja2/py2/jinja2
author      floatdrop <floatdrop@yandex-team.ru>            2022-02-10 16:47:15 +0300
committer   Daniil Cherednik <dcherednik@yandex-team.ru>    2022-02-10 16:47:15 +0300
commit      e63b84f1d39557d9e46ac380b1f388271894293c (patch)
tree        338cdaff3fb027e030b847db66df06019a0e3149 /contrib/python/Jinja2/py2/jinja2
parent      f60febb7ea449535e7b073c386c7ff0539637fc0 (diff)
download    ydb-e63b84f1d39557d9e46ac380b1f388271894293c.tar.gz
Restoring authorship annotation for <floatdrop@yandex-team.ru>. Commit 1 of 2.
Diffstat (limited to 'contrib/python/Jinja2/py2/jinja2')
-rw-r--r--  contrib/python/Jinja2/py2/jinja2/__init__.py    |    8
-rw-r--r--  contrib/python/Jinja2/py2/jinja2/_compat.py     |  170
-rw-r--r--  contrib/python/Jinja2/py2/jinja2/_identifier.py |    2
-rw-r--r--  contrib/python/Jinja2/py2/jinja2/bccache.py     |  574
-rw-r--r--  contrib/python/Jinja2/py2/jinja2/compiler.py    | 2328
-rw-r--r--  contrib/python/Jinja2/py2/jinja2/constants.py   |   38
-rw-r--r--  contrib/python/Jinja2/py2/jinja2/debug.py       |  138
-rw-r--r--  contrib/python/Jinja2/py2/jinja2/defaults.py    |   32
-rw-r--r--  contrib/python/Jinja2/py2/jinja2/environment.py | 2050
-rw-r--r--  contrib/python/Jinja2/py2/jinja2/exceptions.py  |  258
-rw-r--r--  contrib/python/Jinja2/py2/jinja2/ext.py         |  950
-rw-r--r--  contrib/python/Jinja2/py2/jinja2/filters.py     | 1858
-rw-r--r--  contrib/python/Jinja2/py2/jinja2/idtracking.py  |  518
-rw-r--r--  contrib/python/Jinja2/py2/jinja2/lexer.py       |  820
-rw-r--r--  contrib/python/Jinja2/py2/jinja2/loaders.py     |  878
-rw-r--r--  contrib/python/Jinja2/py2/jinja2/meta.py        |  174
-rw-r--r--  contrib/python/Jinja2/py2/jinja2/nativetypes.py |   84
-rw-r--r--  contrib/python/Jinja2/py2/jinja2/nodes.py       | 1700
-rw-r--r--  contrib/python/Jinja2/py2/jinja2/optimizer.py   |   34
-rw-r--r--  contrib/python/Jinja2/py2/jinja2/parser.py      | 1278
-rw-r--r--  contrib/python/Jinja2/py2/jinja2/runtime.py     | 1142
-rw-r--r--  contrib/python/Jinja2/py2/jinja2/sandbox.py     |  768
-rw-r--r--  contrib/python/Jinja2/py2/jinja2/tests.py       |  248
-rw-r--r--  contrib/python/Jinja2/py2/jinja2/utils.py       |  986
-rw-r--r--  contrib/python/Jinja2/py2/jinja2/visitor.py     |  154
25 files changed, 8595 insertions, 8595 deletions
diff --git a/contrib/python/Jinja2/py2/jinja2/__init__.py b/contrib/python/Jinja2/py2/jinja2/__init__.py
index b444e9a841..6fd022be73 100644
--- a/contrib/python/Jinja2/py2/jinja2/__init__.py
+++ b/contrib/python/Jinja2/py2/jinja2/__init__.py
@@ -1,11 +1,11 @@
-# -*- coding: utf-8 -*-
+# -*- coding: utf-8 -*-
"""Jinja is a template engine written in pure Python. It provides a
non-XML syntax that supports inline expressions and an optional
sandboxed environment.
-"""
+"""
from markupsafe import escape
from markupsafe import Markup
-
+
from .bccache import BytecodeCache
from .bccache import FileSystemBytecodeCache
from .bccache import MemcachedBytecodeCache
@@ -41,5 +41,5 @@ from .utils import environmentfunction
from .utils import evalcontextfunction
from .utils import is_undefined
from .utils import select_autoescape
-
+
__version__ = "2.11.3"
diff --git a/contrib/python/Jinja2/py2/jinja2/_compat.py b/contrib/python/Jinja2/py2/jinja2/_compat.py
index 1f044954a0..0632a9e99d 100644
--- a/contrib/python/Jinja2/py2/jinja2/_compat.py
+++ b/contrib/python/Jinja2/py2/jinja2/_compat.py
@@ -1,110 +1,110 @@
-# -*- coding: utf-8 -*-
+# -*- coding: utf-8 -*-
# flake8: noqa
import marshal
-import sys
-
-PY2 = sys.version_info[0] == 2
+import sys
+
+PY2 = sys.version_info[0] == 2
PYPY = hasattr(sys, "pypy_translation_info")
-_identity = lambda x: x
-
-if not PY2:
- unichr = chr
- range_type = range
- text_type = str
- string_types = (str,)
- integer_types = (int,)
-
- iterkeys = lambda d: iter(d.keys())
- itervalues = lambda d: iter(d.values())
- iteritems = lambda d: iter(d.items())
-
- import pickle
- from io import BytesIO, StringIO
-
- NativeStringIO = StringIO
-
- def reraise(tp, value, tb=None):
- if value.__traceback__ is not tb:
- raise value.with_traceback(tb)
- raise value
-
- ifilter = filter
- imap = map
- izip = zip
- intern = sys.intern
-
- implements_iterator = _identity
- implements_to_string = _identity
- encode_filename = _identity
-
+_identity = lambda x: x
+
+if not PY2:
+ unichr = chr
+ range_type = range
+ text_type = str
+ string_types = (str,)
+ integer_types = (int,)
+
+ iterkeys = lambda d: iter(d.keys())
+ itervalues = lambda d: iter(d.values())
+ iteritems = lambda d: iter(d.items())
+
+ import pickle
+ from io import BytesIO, StringIO
+
+ NativeStringIO = StringIO
+
+ def reraise(tp, value, tb=None):
+ if value.__traceback__ is not tb:
+ raise value.with_traceback(tb)
+ raise value
+
+ ifilter = filter
+ imap = map
+ izip = zip
+ intern = sys.intern
+
+ implements_iterator = _identity
+ implements_to_string = _identity
+ encode_filename = _identity
+
marshal_dump = marshal.dump
marshal_load = marshal.load
-else:
- unichr = unichr
- text_type = unicode
- range_type = xrange
- string_types = (str, unicode)
- integer_types = (int, long)
-
- iterkeys = lambda d: d.iterkeys()
- itervalues = lambda d: d.itervalues()
- iteritems = lambda d: d.iteritems()
-
- import cPickle as pickle
- from cStringIO import StringIO as BytesIO, StringIO
-
- NativeStringIO = BytesIO
-
+else:
+ unichr = unichr
+ text_type = unicode
+ range_type = xrange
+ string_types = (str, unicode)
+ integer_types = (int, long)
+
+ iterkeys = lambda d: d.iterkeys()
+ itervalues = lambda d: d.itervalues()
+ iteritems = lambda d: d.iteritems()
+
+ import cPickle as pickle
+ from cStringIO import StringIO as BytesIO, StringIO
+
+ NativeStringIO = BytesIO
+
exec("def reraise(tp, value, tb=None):\n raise tp, value, tb")
-
- from itertools import imap, izip, ifilter
-
- intern = intern
-
- def implements_iterator(cls):
- cls.next = cls.__next__
- del cls.__next__
- return cls
-
- def implements_to_string(cls):
- cls.__unicode__ = cls.__str__
+
+ from itertools import imap, izip, ifilter
+
+ intern = intern
+
+ def implements_iterator(cls):
+ cls.next = cls.__next__
+ del cls.__next__
+ return cls
+
+ def implements_to_string(cls):
+ cls.__unicode__ = cls.__str__
cls.__str__ = lambda x: x.__unicode__().encode("utf-8")
- return cls
-
- def encode_filename(filename):
- if isinstance(filename, unicode):
+ return cls
+
+ def encode_filename(filename):
+ if isinstance(filename, unicode):
return filename.encode("utf-8")
- return filename
-
+ return filename
+
def marshal_dump(code, f):
if isinstance(f, file):
marshal.dump(code, f)
else:
f.write(marshal.dumps(code))
-
+
def marshal_load(f):
if isinstance(f, file):
return marshal.load(f)
return marshal.loads(f.read())
-def with_metaclass(meta, *bases):
- """Create a base class with a metaclass."""
- # This requires a bit of explanation: the basic idea is to make a
- # dummy metaclass for one level of class instantiation that replaces
- # itself with the actual metaclass.
- class metaclass(type):
- def __new__(cls, name, this_bases, d):
- return meta(name, bases, d)
-
+def with_metaclass(meta, *bases):
+ """Create a base class with a metaclass."""
+ # This requires a bit of explanation: the basic idea is to make a
+ # dummy metaclass for one level of class instantiation that replaces
+ # itself with the actual metaclass.
+ class metaclass(type):
+ def __new__(cls, name, this_bases, d):
+ return meta(name, bases, d)
+
return type.__new__(metaclass, "temporary_class", (), {})
+
-
-try:
- from urllib.parse import quote_from_bytes as url_quote
-except ImportError:
- from urllib import quote as url_quote
+try:
+ from urllib.parse import quote_from_bytes as url_quote
+except ImportError:
+ from urllib import quote as url_quote
try:
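The with_metaclass helper above is easiest to follow from an example; a minimal sketch of how it is meant to be used (Meta, Base and Widget are hypothetical names, not part of Jinja):

from jinja2._compat import with_metaclass


class Meta(type):
    # Hypothetical metaclass that tags every class it creates.
    def __new__(mcls, name, bases, namespace):
        cls = super(Meta, mcls).__new__(mcls, name, bases, namespace)
        cls.tagged = True
        return cls


class Base(object):
    pass


# The temporary class returned by with_metaclass is replaced at definition
# time by a class actually created through Meta, on both Python 2 and 3.
class Widget(with_metaclass(Meta, Base)):
    pass


assert type(Widget) is Meta
assert Widget.tagged
assert issubclass(Widget, Base)
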
diff --git a/contrib/python/Jinja2/py2/jinja2/_identifier.py b/contrib/python/Jinja2/py2/jinja2/_identifier.py
index 224d5449d1..b0a761fc2a 100644
--- a/contrib/python/Jinja2/py2/jinja2/_identifier.py
+++ b/contrib/python/Jinja2/py2/jinja2/_identifier.py
@@ -1,6 +1,6 @@
import re
-# generated by scripts/generate_identifier_pattern.py
+# generated by scripts/generate_identifier_pattern.py
pattern = re.compile(
r"[\w·̀-ͯ·҃-֑҇-ׇֽֿׁׂׅׄؐ-ًؚ-ٰٟۖ-ۜ۟-۪ۤۧۨ-ܑۭܰ-݊ަ-ް߫-߳ࠖ-࠙ࠛ-ࠣࠥ-ࠧࠩ-࡙࠭-࡛ࣔ-ࣣ࣡-ःऺ-़ा-ॏ॑-ॗॢॣঁ-ঃ়া-ৄেৈো-্ৗৢৣਁ-ਃ਼ਾ-ੂੇੈੋ-੍ੑੰੱੵઁ-ઃ઼ા-ૅે-ૉો-્ૢૣଁ-ଃ଼ା-ୄେୈୋ-୍ୖୗୢୣஂா-ூெ-ைொ-்ௗఀ-ఃా-ౄె-ైొ-్ౕౖౢౣಁ-ಃ಼ಾ-ೄೆ-ೈೊ-್ೕೖೢೣഁ-ഃാ-ൄെ-ൈൊ-്ൗൢൣංඃ්ා-ුූෘ-ෟෲෳัิ-ฺ็-๎ັິ-ູົຼ່-ໍ༹༘༙༵༷༾༿ཱ-྄྆྇ྍ-ྗྙ-ྼ࿆ါ-ှၖ-ၙၞ-ၠၢ-ၤၧ-ၭၱ-ၴႂ-ႍႏႚ-ႝ፝-፟ᜒ-᜔ᜲ-᜴ᝒᝓᝲᝳ឴-៓៝᠋-᠍ᢅᢆᢩᤠ-ᤫᤰ-᤻ᨗ-ᨛᩕ-ᩞ᩠-᩿᩼᪰-᪽ᬀ-ᬄ᬴-᭄᭫-᭳ᮀ-ᮂᮡ-ᮭ᯦-᯳ᰤ-᰷᳐-᳔᳒-᳨᳭ᳲ-᳴᳸᳹᷀-᷵᷻-᷿‿⁀⁔⃐-⃥⃜⃡-⃰℘℮⳯-⵿⳱ⷠ-〪ⷿ-゙゚〯꙯ꙴ-꙽ꚞꚟ꛰꛱ꠂ꠆ꠋꠣ-ꠧꢀꢁꢴ-ꣅ꣠-꣱ꤦ-꤭ꥇ-꥓ꦀ-ꦃ꦳-꧀ꧥꨩ-ꨶꩃꩌꩍꩻ-ꩽꪰꪲ-ꪴꪷꪸꪾ꪿꫁ꫫ-ꫯꫵ꫶ꯣ-ꯪ꯬꯭ﬞ︀-️︠-︯︳︴﹍-﹏_𐇽𐋠𐍶-𐍺𐨁-𐨃𐨅𐨆𐨌-𐨏𐨸-𐨿𐨺𐫦𐫥𑀀-𑀂𑀸-𑁆𑁿-𑂂𑂰-𑂺𑄀-𑄂𑄧-𑅳𑄴𑆀-𑆂𑆳-𑇊𑇀-𑇌𑈬-𑈷𑈾𑋟-𑋪𑌀-𑌃𑌼𑌾-𑍄𑍇𑍈𑍋-𑍍𑍗𑍢𑍣𑍦-𑍬𑍰-𑍴𑐵-𑑆𑒰-𑓃𑖯-𑖵𑖸-𑗀𑗜𑗝𑘰-𑙀𑚫-𑚷𑜝-𑜫𑰯-𑰶𑰸-𑰿𑲒-𑲧𑲩-𑲶𖫰-𖫴𖬰-𖬶𖽑-𖽾𖾏-𖾒𛲝𛲞𝅥-𝅩𝅭-𝅲𝅻-𝆂𝆅-𝆋𝆪-𝆭𝉂-𝉄𝨀-𝨶𝨻-𝩬𝩵𝪄𝪛-𝪟𝪡-𝪯𞀀-𞀆𞀈-𞀘𞀛-𞀡𞀣𞀤𞀦-𞣐𞀪-𞣖𞥄-𞥊󠄀-󠇯]+" # noqa: B950
)
diff --git a/contrib/python/Jinja2/py2/jinja2/bccache.py b/contrib/python/Jinja2/py2/jinja2/bccache.py
index 9c0661030f..4ed6579d9c 100644
--- a/contrib/python/Jinja2/py2/jinja2/bccache.py
+++ b/contrib/python/Jinja2/py2/jinja2/bccache.py
@@ -1,28 +1,28 @@
-# -*- coding: utf-8 -*-
+# -*- coding: utf-8 -*-
"""The optional bytecode cache system. This is useful if you have very
complex template situations and the compilation of all those templates
slows down your application too much.
-
+
Situations where this is useful are often forking web applications that
are initialized on the first request.
-"""
+"""
import errno
import fnmatch
-import os
+import os
import stat
-import sys
-import tempfile
-from hashlib import sha1
+import sys
+import tempfile
+from hashlib import sha1
from os import listdir
from os import path
-
+
from ._compat import BytesIO
from ._compat import marshal_dump
from ._compat import marshal_load
from ._compat import pickle
from ._compat import text_type
from .utils import open_if_exists
-
+
bc_version = 4
# Magic bytes to identify Jinja bytecode cache files. Contains the
# Python major and minor version to avoid loading incompatible bytecode
@@ -32,291 +32,291 @@ bc_magic = (
+ pickle.dumps(bc_version, 2)
+ pickle.dumps((sys.version_info[0] << 24) | sys.version_info[1], 2)
)
-
-
-class Bucket(object):
- """Buckets are used to store the bytecode for one template. It's created
- and initialized by the bytecode cache and passed to the loading functions.
-
- The buckets get an internal checksum from the cache assigned and use this
- to automatically reject outdated cache material. Individual bytecode
- cache subclasses don't have to care about cache invalidation.
- """
-
- def __init__(self, environment, key, checksum):
- self.environment = environment
- self.key = key
- self.checksum = checksum
- self.reset()
-
- def reset(self):
- """Resets the bucket (unloads the bytecode)."""
- self.code = None
-
- def load_bytecode(self, f):
- """Loads bytecode from a file or file like object."""
- # make sure the magic header is correct
- magic = f.read(len(bc_magic))
- if magic != bc_magic:
- self.reset()
- return
- # the source code of the file changed, we need to reload
- checksum = pickle.load(f)
- if self.checksum != checksum:
- self.reset()
- return
- # if marshal_load fails then we need to reload
- try:
- self.code = marshal_load(f)
- except (EOFError, ValueError, TypeError):
- self.reset()
- return
-
- def write_bytecode(self, f):
- """Dump the bytecode into the file or file like object passed."""
- if self.code is None:
+
+
+class Bucket(object):
+ """Buckets are used to store the bytecode for one template. It's created
+ and initialized by the bytecode cache and passed to the loading functions.
+
+ The buckets get an internal checksum from the cache assigned and use this
+ to automatically reject outdated cache material. Individual bytecode
+ cache subclasses don't have to care about cache invalidation.
+ """
+
+ def __init__(self, environment, key, checksum):
+ self.environment = environment
+ self.key = key
+ self.checksum = checksum
+ self.reset()
+
+ def reset(self):
+ """Resets the bucket (unloads the bytecode)."""
+ self.code = None
+
+ def load_bytecode(self, f):
+ """Loads bytecode from a file or file like object."""
+ # make sure the magic header is correct
+ magic = f.read(len(bc_magic))
+ if magic != bc_magic:
+ self.reset()
+ return
+ # the source code of the file changed, we need to reload
+ checksum = pickle.load(f)
+ if self.checksum != checksum:
+ self.reset()
+ return
+ # if marshal_load fails then we need to reload
+ try:
+ self.code = marshal_load(f)
+ except (EOFError, ValueError, TypeError):
+ self.reset()
+ return
+
+ def write_bytecode(self, f):
+ """Dump the bytecode into the file or file like object passed."""
+ if self.code is None:
raise TypeError("can't write empty bucket")
- f.write(bc_magic)
- pickle.dump(self.checksum, f, 2)
- marshal_dump(self.code, f)
-
- def bytecode_from_string(self, string):
- """Load bytecode from a string."""
- self.load_bytecode(BytesIO(string))
-
- def bytecode_to_string(self):
- """Return the bytecode as string."""
- out = BytesIO()
- self.write_bytecode(out)
- return out.getvalue()
-
-
-class BytecodeCache(object):
- """To implement your own bytecode cache you have to subclass this class
- and override :meth:`load_bytecode` and :meth:`dump_bytecode`. Both of
- these methods are passed a :class:`~jinja2.bccache.Bucket`.
-
- A very basic bytecode cache that saves the bytecode on the file system::
-
- from os import path
-
- class MyCache(BytecodeCache):
-
- def __init__(self, directory):
- self.directory = directory
-
- def load_bytecode(self, bucket):
- filename = path.join(self.directory, bucket.key)
- if path.exists(filename):
- with open(filename, 'rb') as f:
- bucket.load_bytecode(f)
-
- def dump_bytecode(self, bucket):
- filename = path.join(self.directory, bucket.key)
- with open(filename, 'wb') as f:
- bucket.write_bytecode(f)
-
- A more advanced version of a filesystem based bytecode cache is part of
+ f.write(bc_magic)
+ pickle.dump(self.checksum, f, 2)
+ marshal_dump(self.code, f)
+
+ def bytecode_from_string(self, string):
+ """Load bytecode from a string."""
+ self.load_bytecode(BytesIO(string))
+
+ def bytecode_to_string(self):
+ """Return the bytecode as string."""
+ out = BytesIO()
+ self.write_bytecode(out)
+ return out.getvalue()
+
+
+class BytecodeCache(object):
+ """To implement your own bytecode cache you have to subclass this class
+ and override :meth:`load_bytecode` and :meth:`dump_bytecode`. Both of
+ these methods are passed a :class:`~jinja2.bccache.Bucket`.
+
+ A very basic bytecode cache that saves the bytecode on the file system::
+
+ from os import path
+
+ class MyCache(BytecodeCache):
+
+ def __init__(self, directory):
+ self.directory = directory
+
+ def load_bytecode(self, bucket):
+ filename = path.join(self.directory, bucket.key)
+ if path.exists(filename):
+ with open(filename, 'rb') as f:
+ bucket.load_bytecode(f)
+
+ def dump_bytecode(self, bucket):
+ filename = path.join(self.directory, bucket.key)
+ with open(filename, 'wb') as f:
+ bucket.write_bytecode(f)
+
+ A more advanced version of a filesystem based bytecode cache is part of
Jinja.
- """
-
- def load_bytecode(self, bucket):
- """Subclasses have to override this method to load bytecode into a
- bucket. If they are not able to find code in the cache for the
- bucket, it must not do anything.
- """
- raise NotImplementedError()
-
- def dump_bytecode(self, bucket):
- """Subclasses have to override this method to write the bytecode
- from a bucket back to the cache. If it unable to do so it must not
- fail silently but raise an exception.
- """
- raise NotImplementedError()
-
- def clear(self):
+ """
+
+ def load_bytecode(self, bucket):
+ """Subclasses have to override this method to load bytecode into a
+ bucket. If they are not able to find code in the cache for the
+ bucket, it must not do anything.
+ """
+ raise NotImplementedError()
+
+ def dump_bytecode(self, bucket):
+ """Subclasses have to override this method to write the bytecode
+ from a bucket back to the cache. If it unable to do so it must not
+ fail silently but raise an exception.
+ """
+ raise NotImplementedError()
+
+ def clear(self):
"""Clears the cache. This method is not used by Jinja but should be
- implemented to allow applications to clear the bytecode cache used
- by a particular environment.
- """
-
- def get_cache_key(self, name, filename=None):
- """Returns the unique hash key for this template name."""
+ implemented to allow applications to clear the bytecode cache used
+ by a particular environment.
+ """
+
+ def get_cache_key(self, name, filename=None):
+ """Returns the unique hash key for this template name."""
hash = sha1(name.encode("utf-8"))
- if filename is not None:
+ if filename is not None:
filename = "|" + filename
- if isinstance(filename, text_type):
+ if isinstance(filename, text_type):
filename = filename.encode("utf-8")
- hash.update(filename)
- return hash.hexdigest()
-
- def get_source_checksum(self, source):
- """Returns a checksum for the source."""
+ hash.update(filename)
+ return hash.hexdigest()
+
+ def get_source_checksum(self, source):
+ """Returns a checksum for the source."""
return sha1(source.encode("utf-8")).hexdigest()
-
- def get_bucket(self, environment, name, filename, source):
- """Return a cache bucket for the given template. All arguments are
- mandatory but filename may be `None`.
- """
- key = self.get_cache_key(name, filename)
- checksum = self.get_source_checksum(source)
- bucket = Bucket(environment, key, checksum)
- self.load_bytecode(bucket)
- return bucket
-
- def set_bucket(self, bucket):
- """Put the bucket into the cache."""
- self.dump_bytecode(bucket)
-
-
-class FileSystemBytecodeCache(BytecodeCache):
- """A bytecode cache that stores bytecode on the filesystem. It accepts
- two arguments: The directory where the cache items are stored and a
- pattern string that is used to build the filename.
-
- If no directory is specified a default cache directory is selected. On
- Windows the user's temp directory is used, on UNIX systems a directory
- is created for the user in the system temp directory.
-
- The pattern can be used to have multiple separate caches operate on the
- same directory. The default pattern is ``'__jinja2_%s.cache'``. ``%s``
- is replaced with the cache key.
-
- >>> bcc = FileSystemBytecodeCache('/tmp/jinja_cache', '%s.cache')
-
- This bytecode cache supports clearing of the cache using the clear method.
- """
-
+
+ def get_bucket(self, environment, name, filename, source):
+ """Return a cache bucket for the given template. All arguments are
+ mandatory but filename may be `None`.
+ """
+ key = self.get_cache_key(name, filename)
+ checksum = self.get_source_checksum(source)
+ bucket = Bucket(environment, key, checksum)
+ self.load_bytecode(bucket)
+ return bucket
+
+ def set_bucket(self, bucket):
+ """Put the bucket into the cache."""
+ self.dump_bytecode(bucket)
+
+
+class FileSystemBytecodeCache(BytecodeCache):
+ """A bytecode cache that stores bytecode on the filesystem. It accepts
+ two arguments: The directory where the cache items are stored and a
+ pattern string that is used to build the filename.
+
+ If no directory is specified a default cache directory is selected. On
+ Windows the user's temp directory is used, on UNIX systems a directory
+ is created for the user in the system temp directory.
+
+ The pattern can be used to have multiple separate caches operate on the
+ same directory. The default pattern is ``'__jinja2_%s.cache'``. ``%s``
+ is replaced with the cache key.
+
+ >>> bcc = FileSystemBytecodeCache('/tmp/jinja_cache', '%s.cache')
+
+ This bytecode cache supports clearing of the cache using the clear method.
+ """
+
def __init__(self, directory=None, pattern="__jinja2_%s.cache"):
- if directory is None:
- directory = self._get_default_cache_dir()
- self.directory = directory
- self.pattern = pattern
-
- def _get_default_cache_dir(self):
- def _unsafe_dir():
+ if directory is None:
+ directory = self._get_default_cache_dir()
+ self.directory = directory
+ self.pattern = pattern
+
+ def _get_default_cache_dir(self):
+ def _unsafe_dir():
raise RuntimeError(
"Cannot determine safe temp directory. You "
"need to explicitly provide one."
)
-
- tmpdir = tempfile.gettempdir()
-
- # On windows the temporary directory is used specific unless
- # explicitly forced otherwise. We can just use that.
+
+ tmpdir = tempfile.gettempdir()
+
+ # On windows the temporary directory is used specific unless
+ # explicitly forced otherwise. We can just use that.
if os.name == "nt":
- return tmpdir
+ return tmpdir
if not hasattr(os, "getuid"):
- _unsafe_dir()
-
+ _unsafe_dir()
+
dirname = "_jinja2-cache-%d" % os.getuid()
- actual_dir = os.path.join(tmpdir, dirname)
-
- try:
- os.mkdir(actual_dir, stat.S_IRWXU)
- except OSError as e:
- if e.errno != errno.EEXIST:
- raise
- try:
- os.chmod(actual_dir, stat.S_IRWXU)
- actual_dir_stat = os.lstat(actual_dir)
+ actual_dir = os.path.join(tmpdir, dirname)
+
+ try:
+ os.mkdir(actual_dir, stat.S_IRWXU)
+ except OSError as e:
+ if e.errno != errno.EEXIST:
+ raise
+ try:
+ os.chmod(actual_dir, stat.S_IRWXU)
+ actual_dir_stat = os.lstat(actual_dir)
if (
actual_dir_stat.st_uid != os.getuid()
or not stat.S_ISDIR(actual_dir_stat.st_mode)
or stat.S_IMODE(actual_dir_stat.st_mode) != stat.S_IRWXU
):
- _unsafe_dir()
- except OSError as e:
- if e.errno != errno.EEXIST:
- raise
-
- actual_dir_stat = os.lstat(actual_dir)
+ _unsafe_dir()
+ except OSError as e:
+ if e.errno != errno.EEXIST:
+ raise
+
+ actual_dir_stat = os.lstat(actual_dir)
if (
actual_dir_stat.st_uid != os.getuid()
or not stat.S_ISDIR(actual_dir_stat.st_mode)
or stat.S_IMODE(actual_dir_stat.st_mode) != stat.S_IRWXU
):
- _unsafe_dir()
-
- return actual_dir
-
- def _get_cache_filename(self, bucket):
- return path.join(self.directory, self.pattern % bucket.key)
-
- def load_bytecode(self, bucket):
+ _unsafe_dir()
+
+ return actual_dir
+
+ def _get_cache_filename(self, bucket):
+ return path.join(self.directory, self.pattern % bucket.key)
+
+ def load_bytecode(self, bucket):
f = open_if_exists(self._get_cache_filename(bucket), "rb")
- if f is not None:
- try:
- bucket.load_bytecode(f)
- finally:
- f.close()
-
- def dump_bytecode(self, bucket):
+ if f is not None:
+ try:
+ bucket.load_bytecode(f)
+ finally:
+ f.close()
+
+ def dump_bytecode(self, bucket):
f = open(self._get_cache_filename(bucket), "wb")
- try:
- bucket.write_bytecode(f)
- finally:
- f.close()
-
- def clear(self):
- # imported lazily here because google app-engine doesn't support
- # write access on the file system and the function does not exist
- # normally.
- from os import remove
+ try:
+ bucket.write_bytecode(f)
+ finally:
+ f.close()
+
+ def clear(self):
+ # imported lazily here because google app-engine doesn't support
+ # write access on the file system and the function does not exist
+ # normally.
+ from os import remove
files = fnmatch.filter(listdir(self.directory), self.pattern % "*")
- for filename in files:
- try:
- remove(path.join(self.directory, filename))
- except OSError:
- pass
-
-
-class MemcachedBytecodeCache(BytecodeCache):
- """This class implements a bytecode cache that uses a memcache cache for
- storing the information. It does not enforce a specific memcache library
- (tummy's memcache or cmemcache) but will accept any class that provides
- the minimal interface required.
-
- Libraries compatible with this class:
-
+ for filename in files:
+ try:
+ remove(path.join(self.directory, filename))
+ except OSError:
+ pass
+
+
+class MemcachedBytecodeCache(BytecodeCache):
+ """This class implements a bytecode cache that uses a memcache cache for
+ storing the information. It does not enforce a specific memcache library
+ (tummy's memcache or cmemcache) but will accept any class that provides
+ the minimal interface required.
+
+ Libraries compatible with this class:
+
- `cachelib <https://github.com/pallets/cachelib>`_
- `python-memcached <https://pypi.org/project/python-memcached/>`_
-
- (Unfortunately the django cache interface is not compatible because it
- does not support storing binary data, only unicode. You can however pass
- the underlying cache client to the bytecode cache which is available
- as `django.core.cache.cache._client`.)
-
- The minimal interface for the client passed to the constructor is this:
-
- .. class:: MinimalClientInterface
-
- .. method:: set(key, value[, timeout])
-
- Stores the bytecode in the cache. `value` is a string and
- `timeout` the timeout of the key. If timeout is not provided
- a default timeout or no timeout should be assumed, if it's
- provided it's an integer with the number of seconds the cache
- item should exist.
-
- .. method:: get(key)
-
- Returns the value for the cache key. If the item does not
- exist in the cache the return value must be `None`.
-
- The other arguments to the constructor are the prefix for all keys that
- is added before the actual cache key and the timeout for the bytecode in
- the cache system. We recommend a high (or no) timeout.
-
- This bytecode cache does not support clearing of used items in the cache.
- The clear method is a no-operation function.
-
- .. versionadded:: 2.7
- Added support for ignoring memcache errors through the
- `ignore_memcache_errors` parameter.
- """
-
+
+ (Unfortunately the django cache interface is not compatible because it
+ does not support storing binary data, only unicode. You can however pass
+ the underlying cache client to the bytecode cache which is available
+ as `django.core.cache.cache._client`.)
+
+ The minimal interface for the client passed to the constructor is this:
+
+ .. class:: MinimalClientInterface
+
+ .. method:: set(key, value[, timeout])
+
+ Stores the bytecode in the cache. `value` is a string and
+ `timeout` the timeout of the key. If timeout is not provided
+ a default timeout or no timeout should be assumed, if it's
+ provided it's an integer with the number of seconds the cache
+ item should exist.
+
+ .. method:: get(key)
+
+ Returns the value for the cache key. If the item does not
+ exist in the cache the return value must be `None`.
+
+ The other arguments to the constructor are the prefix for all keys that
+ is added before the actual cache key and the timeout for the bytecode in
+ the cache system. We recommend a high (or no) timeout.
+
+ This bytecode cache does not support clearing of used items in the cache.
+ The clear method is a no-operation function.
+
+ .. versionadded:: 2.7
+ Added support for ignoring memcache errors through the
+ `ignore_memcache_errors` parameter.
+ """
+
def __init__(
self,
client,
@@ -324,27 +324,27 @@ class MemcachedBytecodeCache(BytecodeCache):
timeout=None,
ignore_memcache_errors=True,
):
- self.client = client
- self.prefix = prefix
- self.timeout = timeout
- self.ignore_memcache_errors = ignore_memcache_errors
-
- def load_bytecode(self, bucket):
- try:
- code = self.client.get(self.prefix + bucket.key)
- except Exception:
- if not self.ignore_memcache_errors:
- raise
- code = None
- if code is not None:
- bucket.bytecode_from_string(code)
-
- def dump_bytecode(self, bucket):
- args = (self.prefix + bucket.key, bucket.bytecode_to_string())
- if self.timeout is not None:
- args += (self.timeout,)
- try:
- self.client.set(*args)
- except Exception:
- if not self.ignore_memcache_errors:
- raise
+ self.client = client
+ self.prefix = prefix
+ self.timeout = timeout
+ self.ignore_memcache_errors = ignore_memcache_errors
+
+ def load_bytecode(self, bucket):
+ try:
+ code = self.client.get(self.prefix + bucket.key)
+ except Exception:
+ if not self.ignore_memcache_errors:
+ raise
+ code = None
+ if code is not None:
+ bucket.bytecode_from_string(code)
+
+ def dump_bytecode(self, bucket):
+ args = (self.prefix + bucket.key, bucket.bytecode_to_string())
+ if self.timeout is not None:
+ args += (self.timeout,)
+ try:
+ self.client.set(*args)
+ except Exception:
+ if not self.ignore_memcache_errors:
+ raise
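The MemcachedBytecodeCache docstring above describes its minimal client interface only in prose; here is a sketch of a compatible client and how it could be wired into an environment. The dict-backed client, the prefix/timeout values and the "templates" directory are illustrative assumptions, not part of Jinja:

from jinja2 import Environment, FileSystemLoader, MemcachedBytecodeCache


class DictClient(object):
    # Illustrative stand-in for a memcached client: only the two methods
    # MemcachedBytecodeCache calls, with get() returning None on a miss.
    def __init__(self):
        self._store = {}

    def get(self, key):
        return self._store.get(key)

    def set(self, key, value, timeout=None):
        self._store[key] = value


env = Environment(
    loader=FileSystemLoader("templates"),
    bytecode_cache=MemcachedBytecodeCache(
        DictClient(), prefix="jinja2/bytecode/", timeout=600
    ),
)
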
diff --git a/contrib/python/Jinja2/py2/jinja2/compiler.py b/contrib/python/Jinja2/py2/jinja2/compiler.py
index 63297b42c3..6bec80c9ce 100644
--- a/contrib/python/Jinja2/py2/jinja2/compiler.py
+++ b/contrib/python/Jinja2/py2/jinja2/compiler.py
@@ -1,13 +1,13 @@
-# -*- coding: utf-8 -*-
+# -*- coding: utf-8 -*-
"""Compiles nodes from the parser into Python code."""
from collections import namedtuple
from functools import update_wrapper
-from itertools import chain
-from keyword import iskeyword as is_python_keyword
-
+from itertools import chain
+from keyword import iskeyword as is_python_keyword
+
from markupsafe import escape
from markupsafe import Markup
-
+
from . import nodes
from ._compat import imap
from ._compat import iteritems
@@ -27,7 +27,7 @@ from .optimizer import Optimizer
from .utils import concat
from .visitor import NodeVisitor
-operators = {
+operators = {
"eq": "==",
"ne": "!=",
"gt": ">",
@@ -36,559 +36,559 @@ operators = {
"lteq": "<=",
"in": "in",
"notin": "not in",
-}
-
-# what method to iterate over items do we want to use for dict iteration
-# in generated code? on 2.x let's go with iteritems, on 3.x with items
+}
+
+# what method to iterate over items do we want to use for dict iteration
+# in generated code? on 2.x let's go with iteritems, on 3.x with items
if hasattr(dict, "iteritems"):
dict_item_iter = "iteritems"
-else:
+else:
dict_item_iter = "items"
-
+
code_features = ["division"]
-
-# does this python version support generator stops? (PEP 0479)
-try:
+
+# does this python version support generator stops? (PEP 0479)
+try:
exec("from __future__ import generator_stop")
code_features.append("generator_stop")
-except SyntaxError:
- pass
-
-# does this python version support yield from?
-try:
+except SyntaxError:
+ pass
+
+# does this python version support yield from?
+try:
exec("def f(): yield from x()")
-except SyntaxError:
- supports_yield_from = False
-else:
- supports_yield_from = True
-
-
-def optimizeconst(f):
- def new_func(self, node, frame, **kwargs):
- # Only optimize if the frame is not volatile
- if self.optimized and not frame.eval_ctx.volatile:
- new_node = self.optimizer.visit(node, frame.eval_ctx)
- if new_node != node:
- return self.visit(new_node, frame)
- return f(self, node, frame, **kwargs)
-
- return update_wrapper(new_func, f)
-
-
+except SyntaxError:
+ supports_yield_from = False
+else:
+ supports_yield_from = True
+
+
+def optimizeconst(f):
+ def new_func(self, node, frame, **kwargs):
+ # Only optimize if the frame is not volatile
+ if self.optimized and not frame.eval_ctx.volatile:
+ new_node = self.optimizer.visit(node, frame.eval_ctx)
+ if new_node != node:
+ return self.visit(new_node, frame)
+ return f(self, node, frame, **kwargs)
+
+ return update_wrapper(new_func, f)
+
+
def generate(
node, environment, name, filename, stream=None, defer_init=False, optimized=True
):
- """Generate the python source for a node tree."""
- if not isinstance(node, nodes.Template):
+ """Generate the python source for a node tree."""
+ if not isinstance(node, nodes.Template):
raise TypeError("Can't compile non template nodes")
generator = environment.code_generator_class(
environment, name, filename, stream, defer_init, optimized
)
- generator.visit(node)
- if stream is None:
- return generator.stream.getvalue()
-
-
-def has_safe_repr(value):
- """Does the node have a safe representation?"""
- if value is None or value is NotImplemented or value is Ellipsis:
- return True
- if type(value) in (bool, int, float, complex, range_type, Markup) + string_types:
- return True
- if type(value) in (tuple, list, set, frozenset):
- for item in value:
- if not has_safe_repr(item):
- return False
- return True
- elif type(value) is dict:
- for key, value in iteritems(value):
- if not has_safe_repr(key):
- return False
- if not has_safe_repr(value):
- return False
- return True
- return False
-
-
-def find_undeclared(nodes, names):
- """Check if the names passed are accessed undeclared. The return value
- is a set of all the undeclared names from the sequence of names found.
- """
- visitor = UndeclaredNameVisitor(names)
- try:
- for node in nodes:
- visitor.visit(node)
- except VisitorExit:
- pass
- return visitor.undeclared
-
-
-class MacroRef(object):
- def __init__(self, node):
- self.node = node
- self.accesses_caller = False
- self.accesses_kwargs = False
- self.accesses_varargs = False
-
-
-class Frame(object):
- """Holds compile time information for us."""
-
- def __init__(self, eval_ctx, parent=None, level=None):
- self.eval_ctx = eval_ctx
+ generator.visit(node)
+ if stream is None:
+ return generator.stream.getvalue()
+
+
+def has_safe_repr(value):
+ """Does the node have a safe representation?"""
+ if value is None or value is NotImplemented or value is Ellipsis:
+ return True
+ if type(value) in (bool, int, float, complex, range_type, Markup) + string_types:
+ return True
+ if type(value) in (tuple, list, set, frozenset):
+ for item in value:
+ if not has_safe_repr(item):
+ return False
+ return True
+ elif type(value) is dict:
+ for key, value in iteritems(value):
+ if not has_safe_repr(key):
+ return False
+ if not has_safe_repr(value):
+ return False
+ return True
+ return False
+
+
+def find_undeclared(nodes, names):
+ """Check if the names passed are accessed undeclared. The return value
+ is a set of all the undeclared names from the sequence of names found.
+ """
+ visitor = UndeclaredNameVisitor(names)
+ try:
+ for node in nodes:
+ visitor.visit(node)
+ except VisitorExit:
+ pass
+ return visitor.undeclared
+
+
+class MacroRef(object):
+ def __init__(self, node):
+ self.node = node
+ self.accesses_caller = False
+ self.accesses_kwargs = False
+ self.accesses_varargs = False
+
+
+class Frame(object):
+ """Holds compile time information for us."""
+
+ def __init__(self, eval_ctx, parent=None, level=None):
+ self.eval_ctx = eval_ctx
self.symbols = Symbols(parent and parent.symbols or None, level=level)
-
- # a toplevel frame is the root + soft frames such as if conditions.
- self.toplevel = False
-
- # the root frame is basically just the outermost frame, so no if
- # conditions. This information is used to optimize inheritance
- # situations.
- self.rootlevel = False
-
- # in some dynamic inheritance situations the compiler needs to add
- # write tests around output statements.
- self.require_output_check = parent and parent.require_output_check
-
- # inside some tags we are using a buffer rather than yield statements.
- # this for example affects {% filter %} or {% macro %}. If a frame
- # is buffered this variable points to the name of the list used as
- # buffer.
- self.buffer = None
-
- # the name of the block we're in, otherwise None.
- self.block = parent and parent.block or None
-
- # the parent of this frame
- self.parent = parent
-
- if parent is not None:
- self.buffer = parent.buffer
-
- def copy(self):
- """Create a copy of the current one."""
- rv = object.__new__(self.__class__)
- rv.__dict__.update(self.__dict__)
- rv.symbols = self.symbols.copy()
- return rv
-
- def inner(self, isolated=False):
- """Return an inner frame."""
- if isolated:
- return Frame(self.eval_ctx, level=self.symbols.level + 1)
- return Frame(self.eval_ctx, self)
-
- def soft(self):
- """Return a soft frame. A soft frame may not be modified as
- standalone thing as it shares the resources with the frame it
- was created of, but it's not a rootlevel frame any longer.
-
- This is only used to implement if-statements.
- """
- rv = self.copy()
- rv.rootlevel = False
- return rv
-
- __copy__ = copy
-
-
-class VisitorExit(RuntimeError):
- """Exception used by the `UndeclaredNameVisitor` to signal a stop."""
-
-
-class DependencyFinderVisitor(NodeVisitor):
- """A visitor that collects filter and test calls."""
-
- def __init__(self):
- self.filters = set()
- self.tests = set()
-
- def visit_Filter(self, node):
- self.generic_visit(node)
- self.filters.add(node.name)
-
- def visit_Test(self, node):
- self.generic_visit(node)
- self.tests.add(node.name)
-
- def visit_Block(self, node):
- """Stop visiting at blocks."""
-
-
-class UndeclaredNameVisitor(NodeVisitor):
- """A visitor that checks if a name is accessed without being
- declared. This is different from the frame visitor as it will
- not stop at closure frames.
- """
-
- def __init__(self, names):
- self.names = set(names)
- self.undeclared = set()
-
- def visit_Name(self, node):
+
+ # a toplevel frame is the root + soft frames such as if conditions.
+ self.toplevel = False
+
+ # the root frame is basically just the outermost frame, so no if
+ # conditions. This information is used to optimize inheritance
+ # situations.
+ self.rootlevel = False
+
+ # in some dynamic inheritance situations the compiler needs to add
+ # write tests around output statements.
+ self.require_output_check = parent and parent.require_output_check
+
+ # inside some tags we are using a buffer rather than yield statements.
+ # this for example affects {% filter %} or {% macro %}. If a frame
+ # is buffered this variable points to the name of the list used as
+ # buffer.
+ self.buffer = None
+
+ # the name of the block we're in, otherwise None.
+ self.block = parent and parent.block or None
+
+ # the parent of this frame
+ self.parent = parent
+
+ if parent is not None:
+ self.buffer = parent.buffer
+
+ def copy(self):
+ """Create a copy of the current one."""
+ rv = object.__new__(self.__class__)
+ rv.__dict__.update(self.__dict__)
+ rv.symbols = self.symbols.copy()
+ return rv
+
+ def inner(self, isolated=False):
+ """Return an inner frame."""
+ if isolated:
+ return Frame(self.eval_ctx, level=self.symbols.level + 1)
+ return Frame(self.eval_ctx, self)
+
+ def soft(self):
+ """Return a soft frame. A soft frame may not be modified as
+ standalone thing as it shares the resources with the frame it
+ was created of, but it's not a rootlevel frame any longer.
+
+ This is only used to implement if-statements.
+ """
+ rv = self.copy()
+ rv.rootlevel = False
+ return rv
+
+ __copy__ = copy
+
+
+class VisitorExit(RuntimeError):
+ """Exception used by the `UndeclaredNameVisitor` to signal a stop."""
+
+
+class DependencyFinderVisitor(NodeVisitor):
+ """A visitor that collects filter and test calls."""
+
+ def __init__(self):
+ self.filters = set()
+ self.tests = set()
+
+ def visit_Filter(self, node):
+ self.generic_visit(node)
+ self.filters.add(node.name)
+
+ def visit_Test(self, node):
+ self.generic_visit(node)
+ self.tests.add(node.name)
+
+ def visit_Block(self, node):
+ """Stop visiting at blocks."""
+
+
+class UndeclaredNameVisitor(NodeVisitor):
+ """A visitor that checks if a name is accessed without being
+ declared. This is different from the frame visitor as it will
+ not stop at closure frames.
+ """
+
+ def __init__(self, names):
+ self.names = set(names)
+ self.undeclared = set()
+
+ def visit_Name(self, node):
if node.ctx == "load" and node.name in self.names:
- self.undeclared.add(node.name)
- if self.undeclared == self.names:
- raise VisitorExit()
- else:
- self.names.discard(node.name)
-
- def visit_Block(self, node):
- """Stop visiting a blocks."""
-
-
-class CompilerExit(Exception):
- """Raised if the compiler encountered a situation where it just
- doesn't make sense to further process the code. Any block that
- raises such an exception is not further processed.
- """
-
-
-class CodeGenerator(NodeVisitor):
+ self.undeclared.add(node.name)
+ if self.undeclared == self.names:
+ raise VisitorExit()
+ else:
+ self.names.discard(node.name)
+
+ def visit_Block(self, node):
+ """Stop visiting a blocks."""
+
+
+class CompilerExit(Exception):
+ """Raised if the compiler encountered a situation where it just
+ doesn't make sense to further process the code. Any block that
+ raises such an exception is not further processed.
+ """
+
+
+class CodeGenerator(NodeVisitor):
def __init__(
self, environment, name, filename, stream=None, defer_init=False, optimized=True
):
- if stream is None:
- stream = NativeStringIO()
- self.environment = environment
- self.name = name
- self.filename = filename
- self.stream = stream
- self.created_block_context = False
- self.defer_init = defer_init
- self.optimized = optimized
- if optimized:
- self.optimizer = Optimizer(environment)
-
- # aliases for imports
- self.import_aliases = {}
-
- # a registry for all blocks. Because blocks are moved out
- # into the global python scope they are registered here
- self.blocks = {}
-
- # the number of extends statements so far
- self.extends_so_far = 0
-
- # some templates have a rootlevel extends. In this case we
- # can safely assume that we're a child template and do some
- # more optimizations.
- self.has_known_extends = False
-
- # the current line number
- self.code_lineno = 1
-
- # registry of all filters and tests (global, not block local)
- self.tests = {}
- self.filters = {}
-
- # the debug information
- self.debug_info = []
- self._write_debug_info = None
-
- # the number of new lines before the next write()
- self._new_lines = 0
-
- # the line number of the last written statement
- self._last_line = 0
-
- # true if nothing was written so far.
- self._first_write = True
-
- # used by the `temporary_identifier` method to get new
- # unique, temporary identifier
- self._last_identifier = 0
-
- # the current indentation
- self._indentation = 0
-
- # Tracks toplevel assignments
- self._assign_stack = []
-
- # Tracks parameter definition blocks
- self._param_def_block = []
-
- # Tracks the current context.
+ if stream is None:
+ stream = NativeStringIO()
+ self.environment = environment
+ self.name = name
+ self.filename = filename
+ self.stream = stream
+ self.created_block_context = False
+ self.defer_init = defer_init
+ self.optimized = optimized
+ if optimized:
+ self.optimizer = Optimizer(environment)
+
+ # aliases for imports
+ self.import_aliases = {}
+
+ # a registry for all blocks. Because blocks are moved out
+ # into the global python scope they are registered here
+ self.blocks = {}
+
+ # the number of extends statements so far
+ self.extends_so_far = 0
+
+ # some templates have a rootlevel extends. In this case we
+ # can safely assume that we're a child template and do some
+ # more optimizations.
+ self.has_known_extends = False
+
+ # the current line number
+ self.code_lineno = 1
+
+ # registry of all filters and tests (global, not block local)
+ self.tests = {}
+ self.filters = {}
+
+ # the debug information
+ self.debug_info = []
+ self._write_debug_info = None
+
+ # the number of new lines before the next write()
+ self._new_lines = 0
+
+ # the line number of the last written statement
+ self._last_line = 0
+
+ # true if nothing was written so far.
+ self._first_write = True
+
+ # used by the `temporary_identifier` method to get new
+ # unique, temporary identifier
+ self._last_identifier = 0
+
+ # the current indentation
+ self._indentation = 0
+
+ # Tracks toplevel assignments
+ self._assign_stack = []
+
+ # Tracks parameter definition blocks
+ self._param_def_block = []
+
+ # Tracks the current context.
self._context_reference_stack = ["context"]
-
- # -- Various compilation helpers
-
- def fail(self, msg, lineno):
- """Fail with a :exc:`TemplateAssertionError`."""
- raise TemplateAssertionError(msg, lineno, self.name, self.filename)
-
- def temporary_identifier(self):
- """Get a new unique identifier."""
- self._last_identifier += 1
+
+ # -- Various compilation helpers
+
+ def fail(self, msg, lineno):
+ """Fail with a :exc:`TemplateAssertionError`."""
+ raise TemplateAssertionError(msg, lineno, self.name, self.filename)
+
+ def temporary_identifier(self):
+ """Get a new unique identifier."""
+ self._last_identifier += 1
return "t_%d" % self._last_identifier
-
- def buffer(self, frame):
- """Enable buffering for the frame from that point onwards."""
- frame.buffer = self.temporary_identifier()
+
+ def buffer(self, frame):
+ """Enable buffering for the frame from that point onwards."""
+ frame.buffer = self.temporary_identifier()
self.writeline("%s = []" % frame.buffer)
-
- def return_buffer_contents(self, frame, force_unescaped=False):
- """Return the buffer contents of the frame."""
- if not force_unescaped:
- if frame.eval_ctx.volatile:
+
+ def return_buffer_contents(self, frame, force_unescaped=False):
+ """Return the buffer contents of the frame."""
+ if not force_unescaped:
+ if frame.eval_ctx.volatile:
self.writeline("if context.eval_ctx.autoescape:")
- self.indent()
+ self.indent()
self.writeline("return Markup(concat(%s))" % frame.buffer)
- self.outdent()
+ self.outdent()
self.writeline("else:")
- self.indent()
+ self.indent()
self.writeline("return concat(%s)" % frame.buffer)
- self.outdent()
- return
- elif frame.eval_ctx.autoescape:
+ self.outdent()
+ return
+ elif frame.eval_ctx.autoescape:
self.writeline("return Markup(concat(%s))" % frame.buffer)
- return
+ return
self.writeline("return concat(%s)" % frame.buffer)
-
- def indent(self):
- """Indent by one."""
- self._indentation += 1
-
- def outdent(self, step=1):
- """Outdent by step."""
- self._indentation -= step
-
- def start_write(self, frame, node=None):
- """Yield or write into the frame buffer."""
- if frame.buffer is None:
+
+ def indent(self):
+ """Indent by one."""
+ self._indentation += 1
+
+ def outdent(self, step=1):
+ """Outdent by step."""
+ self._indentation -= step
+
+ def start_write(self, frame, node=None):
+ """Yield or write into the frame buffer."""
+ if frame.buffer is None:
self.writeline("yield ", node)
- else:
+ else:
self.writeline("%s.append(" % frame.buffer, node)
-
- def end_write(self, frame):
- """End the writing process started by `start_write`."""
- if frame.buffer is not None:
+
+ def end_write(self, frame):
+ """End the writing process started by `start_write`."""
+ if frame.buffer is not None:
self.write(")")
-
- def simple_write(self, s, frame, node=None):
- """Simple shortcut for start_write + write + end_write."""
- self.start_write(frame, node)
- self.write(s)
- self.end_write(frame)
-
- def blockvisit(self, nodes, frame):
- """Visit a list of nodes as block in a frame. If the current frame
- is no buffer a dummy ``if 0: yield None`` is written automatically.
- """
- try:
+
+ def simple_write(self, s, frame, node=None):
+ """Simple shortcut for start_write + write + end_write."""
+ self.start_write(frame, node)
+ self.write(s)
+ self.end_write(frame)
+
+ def blockvisit(self, nodes, frame):
+ """Visit a list of nodes as block in a frame. If the current frame
+ is no buffer a dummy ``if 0: yield None`` is written automatically.
+ """
+ try:
self.writeline("pass")
- for node in nodes:
- self.visit(node, frame)
- except CompilerExit:
- pass
-
- def write(self, x):
- """Write a string into the output stream."""
- if self._new_lines:
- if not self._first_write:
+ for node in nodes:
+ self.visit(node, frame)
+ except CompilerExit:
+ pass
+
+ def write(self, x):
+ """Write a string into the output stream."""
+ if self._new_lines:
+ if not self._first_write:
self.stream.write("\n" * self._new_lines)
- self.code_lineno += self._new_lines
- if self._write_debug_info is not None:
+ self.code_lineno += self._new_lines
+ if self._write_debug_info is not None:
self.debug_info.append((self._write_debug_info, self.code_lineno))
- self._write_debug_info = None
- self._first_write = False
+ self._write_debug_info = None
+ self._first_write = False
self.stream.write(" " * self._indentation)
- self._new_lines = 0
- self.stream.write(x)
-
- def writeline(self, x, node=None, extra=0):
- """Combination of newline and write."""
- self.newline(node, extra)
- self.write(x)
-
- def newline(self, node=None, extra=0):
- """Add one or more newlines before the next write."""
- self._new_lines = max(self._new_lines, 1 + extra)
- if node is not None and node.lineno != self._last_line:
- self._write_debug_info = node.lineno
- self._last_line = node.lineno
-
- def signature(self, node, frame, extra_kwargs=None):
- """Writes a function call to the stream for the current node.
- A leading comma is added automatically. The extra keyword
- arguments may not include python keywords otherwise a syntax
+ self._new_lines = 0
+ self.stream.write(x)
+
+ def writeline(self, x, node=None, extra=0):
+ """Combination of newline and write."""
+ self.newline(node, extra)
+ self.write(x)
+
+ def newline(self, node=None, extra=0):
+ """Add one or more newlines before the next write."""
+ self._new_lines = max(self._new_lines, 1 + extra)
+ if node is not None and node.lineno != self._last_line:
+ self._write_debug_info = node.lineno
+ self._last_line = node.lineno
+
+ def signature(self, node, frame, extra_kwargs=None):
+ """Writes a function call to the stream for the current node.
+ A leading comma is added automatically. The extra keyword
+ arguments may not include python keywords otherwise a syntax
error could occur. The extra keyword arguments should be given
- as python dict.
- """
- # if any of the given keyword arguments is a python keyword
- # we have to make sure that no invalid call is created.
- kwarg_workaround = False
- for kwarg in chain((x.key for x in node.kwargs), extra_kwargs or ()):
- if is_python_keyword(kwarg):
- kwarg_workaround = True
- break
-
- for arg in node.args:
+ as python dict.
+ """
+ # if any of the given keyword arguments is a python keyword
+ # we have to make sure that no invalid call is created.
+ kwarg_workaround = False
+ for kwarg in chain((x.key for x in node.kwargs), extra_kwargs or ()):
+ if is_python_keyword(kwarg):
+ kwarg_workaround = True
+ break
+
+ for arg in node.args:
self.write(", ")
- self.visit(arg, frame)
-
- if not kwarg_workaround:
- for kwarg in node.kwargs:
+ self.visit(arg, frame)
+
+ if not kwarg_workaround:
+ for kwarg in node.kwargs:
self.write(", ")
- self.visit(kwarg, frame)
- if extra_kwargs is not None:
- for key, value in iteritems(extra_kwargs):
+ self.visit(kwarg, frame)
+ if extra_kwargs is not None:
+ for key, value in iteritems(extra_kwargs):
self.write(", %s=%s" % (key, value))
- if node.dyn_args:
+ if node.dyn_args:
self.write(", *")
- self.visit(node.dyn_args, frame)
-
- if kwarg_workaround:
- if node.dyn_kwargs is not None:
+ self.visit(node.dyn_args, frame)
+
+ if kwarg_workaround:
+ if node.dyn_kwargs is not None:
self.write(", **dict({")
- else:
+ else:
self.write(", **{")
- for kwarg in node.kwargs:
+ for kwarg in node.kwargs:
self.write("%r: " % kwarg.key)
- self.visit(kwarg.value, frame)
+ self.visit(kwarg.value, frame)
self.write(", ")
- if extra_kwargs is not None:
- for key, value in iteritems(extra_kwargs):
+ if extra_kwargs is not None:
+ for key, value in iteritems(extra_kwargs):
self.write("%r: %s, " % (key, value))
- if node.dyn_kwargs is not None:
+ if node.dyn_kwargs is not None:
self.write("}, **")
- self.visit(node.dyn_kwargs, frame)
+ self.visit(node.dyn_kwargs, frame)
self.write(")")
- else:
+ else:
self.write("}")
-
- elif node.dyn_kwargs is not None:
+
+ elif node.dyn_kwargs is not None:
self.write(", **")
- self.visit(node.dyn_kwargs, frame)
-
- def pull_dependencies(self, nodes):
- """Pull all the dependencies."""
- visitor = DependencyFinderVisitor()
- for node in nodes:
- visitor.visit(node)
+ self.visit(node.dyn_kwargs, frame)
+
+ def pull_dependencies(self, nodes):
+ """Pull all the dependencies."""
+ visitor = DependencyFinderVisitor()
+ for node in nodes:
+ visitor.visit(node)
for dependency in "filters", "tests":
- mapping = getattr(self, dependency)
- for name in getattr(visitor, dependency):
- if name not in mapping:
- mapping[name] = self.temporary_identifier()
+ mapping = getattr(self, dependency)
+ for name in getattr(visitor, dependency):
+ if name not in mapping:
+ mapping[name] = self.temporary_identifier()
self.writeline(
"%s = environment.%s[%r]" % (mapping[name], dependency, name)
)
-
- def enter_frame(self, frame):
- undefs = []
- for target, (action, param) in iteritems(frame.symbols.loads):
- if action == VAR_LOAD_PARAMETER:
- pass
- elif action == VAR_LOAD_RESOLVE:
+
+ def enter_frame(self, frame):
+ undefs = []
+ for target, (action, param) in iteritems(frame.symbols.loads):
+ if action == VAR_LOAD_PARAMETER:
+ pass
+ elif action == VAR_LOAD_RESOLVE:
self.writeline("%s = %s(%r)" % (target, self.get_resolve_func(), param))
- elif action == VAR_LOAD_ALIAS:
+ elif action == VAR_LOAD_ALIAS:
self.writeline("%s = %s" % (target, param))
- elif action == VAR_LOAD_UNDEFINED:
- undefs.append(target)
- else:
+ elif action == VAR_LOAD_UNDEFINED:
+ undefs.append(target)
+ else:
raise NotImplementedError("unknown load instruction")
- if undefs:
+ if undefs:
self.writeline("%s = missing" % " = ".join(undefs))
-
- def leave_frame(self, frame, with_python_scope=False):
- if not with_python_scope:
- undefs = []
- for target, _ in iteritems(frame.symbols.loads):
- undefs.append(target)
- if undefs:
+
+ def leave_frame(self, frame, with_python_scope=False):
+ if not with_python_scope:
+ undefs = []
+ for target, _ in iteritems(frame.symbols.loads):
+ undefs.append(target)
+ if undefs:
self.writeline("%s = missing" % " = ".join(undefs))
-
- def func(self, name):
- if self.environment.is_async:
+
+ def func(self, name):
+ if self.environment.is_async:
return "async def %s" % name
return "def %s" % name
-
- def macro_body(self, node, frame):
- """Dump the function def of a macro or call block."""
- frame = frame.inner()
- frame.symbols.analyze_node(node)
- macro_ref = MacroRef(node)
-
- explicit_caller = None
- skip_special_params = set()
- args = []
- for idx, arg in enumerate(node.args):
+
+ def macro_body(self, node, frame):
+ """Dump the function def of a macro or call block."""
+ frame = frame.inner()
+ frame.symbols.analyze_node(node)
+ macro_ref = MacroRef(node)
+
+ explicit_caller = None
+ skip_special_params = set()
+ args = []
+ for idx, arg in enumerate(node.args):
if arg.name == "caller":
- explicit_caller = idx
+ explicit_caller = idx
if arg.name in ("kwargs", "varargs"):
- skip_special_params.add(arg.name)
- args.append(frame.symbols.ref(arg.name))
-
+ skip_special_params.add(arg.name)
+ args.append(frame.symbols.ref(arg.name))
+
undeclared = find_undeclared(node.body, ("caller", "kwargs", "varargs"))
-
+
if "caller" in undeclared:
# In older Jinja versions there was a bug that allowed caller
- # to retain the special behavior even if it was mentioned in
- # the argument list. However thankfully this was only really
- # working if it was the last argument. So we are explicitly
- # checking this now and error out if it is anywhere else in
- # the argument list.
- if explicit_caller is not None:
- try:
- node.defaults[explicit_caller - len(node.args)]
- except IndexError:
+ # to retain the special behavior even if it was mentioned in
+ # the argument list. However thankfully this was only really
+ # working if it was the last argument. So we are explicitly
+ # checking this now and error out if it is anywhere else in
+ # the argument list.
+ if explicit_caller is not None:
+ try:
+ node.defaults[explicit_caller - len(node.args)]
+ except IndexError:
self.fail(
"When defining macros or call blocks the "
'special "caller" argument must be omitted '
"or be given a default.",
node.lineno,
)
- else:
+ else:
args.append(frame.symbols.declare_parameter("caller"))
- macro_ref.accesses_caller = True
+ macro_ref.accesses_caller = True
if "kwargs" in undeclared and "kwargs" not in skip_special_params:
args.append(frame.symbols.declare_parameter("kwargs"))
- macro_ref.accesses_kwargs = True
+ macro_ref.accesses_kwargs = True
if "varargs" in undeclared and "varargs" not in skip_special_params:
args.append(frame.symbols.declare_parameter("varargs"))
- macro_ref.accesses_varargs = True
-
- # macros are delayed, they never require output checks
- frame.require_output_check = False
- frame.symbols.analyze_node(node)
+ macro_ref.accesses_varargs = True
+
+ # macros are delayed, they never require output checks
+ frame.require_output_check = False
+ frame.symbols.analyze_node(node)
self.writeline("%s(%s):" % (self.func("macro"), ", ".join(args)), node)
- self.indent()
-
- self.buffer(frame)
- self.enter_frame(frame)
-
- self.push_parameter_definitions(frame)
- for idx, arg in enumerate(node.args):
- ref = frame.symbols.ref(arg.name)
+ self.indent()
+
+ self.buffer(frame)
+ self.enter_frame(frame)
+
+ self.push_parameter_definitions(frame)
+ for idx, arg in enumerate(node.args):
+ ref = frame.symbols.ref(arg.name)
self.writeline("if %s is missing:" % ref)
- self.indent()
- try:
- default = node.defaults[idx - len(node.args)]
- except IndexError:
+ self.indent()
+ try:
+ default = node.defaults[idx - len(node.args)]
+ except IndexError:
self.writeline(
"%s = undefined(%r, name=%r)"
% (ref, "parameter %r was not provided" % arg.name, arg.name)
)
- else:
+ else:
self.writeline("%s = " % ref)
- self.visit(default, frame)
- self.mark_parameter_stored(ref)
- self.outdent()
- self.pop_parameter_definitions()
-
- self.blockvisit(node.body, frame)
- self.return_buffer_contents(frame, force_unescaped=True)
- self.leave_frame(frame, with_python_scope=True)
- self.outdent()
-
- return frame, macro_ref
-
- def macro_def(self, macro_ref, frame):
- """Dump the macro definition for the def created by macro_body."""
+ self.visit(default, frame)
+ self.mark_parameter_stored(ref)
+ self.outdent()
+ self.pop_parameter_definitions()
+
+ self.blockvisit(node.body, frame)
+ self.return_buffer_contents(frame, force_unescaped=True)
+ self.leave_frame(frame, with_python_scope=True)
+ self.outdent()
+
+ return frame, macro_ref
+
+ def macro_def(self, macro_ref, frame):
+ """Dump the macro definition for the def created by macro_body."""
arg_tuple = ", ".join(repr(x.name) for x in macro_ref.node.args)
name = getattr(macro_ref.node, "name", None)
- if len(macro_ref.node.args) == 1:
+ if len(macro_ref.node.args) == 1:
arg_tuple += ","
self.write(
"Macro(environment, macro, %r, (%s), %r, %r, %r, "
@@ -601,213 +601,213 @@ class CodeGenerator(NodeVisitor):
macro_ref.accesses_caller,
)
)
-
- def position(self, node):
- """Return a human readable position for the node."""
+
+ def position(self, node):
+ """Return a human readable position for the node."""
rv = "line %d" % node.lineno
- if self.name is not None:
+ if self.name is not None:
rv += " in " + repr(self.name)
- return rv
-
- def dump_local_context(self, frame):
+ return rv
+
+ def dump_local_context(self, frame):
return "{%s}" % ", ".join(
"%r: %s" % (name, target)
for name, target in iteritems(frame.symbols.dump_stores())
)
-
- def write_commons(self):
- """Writes a common preamble that is used by root and block functions.
- Primarily this sets up common local helpers and enforces a generator
- through a dead branch.
- """
+
+ def write_commons(self):
+ """Writes a common preamble that is used by root and block functions.
+ Primarily this sets up common local helpers and enforces a generator
+ through a dead branch.
+ """
self.writeline("resolve = context.resolve_or_missing")
self.writeline("undefined = environment.undefined")
# always use the standard Undefined class for the implicit else of
# conditional expressions
self.writeline("cond_expr_undefined = Undefined")
self.writeline("if 0: yield None")
-
- def push_parameter_definitions(self, frame):
- """Pushes all parameter targets from the given frame into a local
- stack that permits tracking of yet to be assigned parameters. In
- particular this enables the optimization from `visit_Name` to skip
- undefined expressions for parameters in macros as macros can reference
- otherwise unbound parameters.
- """
- self._param_def_block.append(frame.symbols.dump_param_targets())
-
- def pop_parameter_definitions(self):
- """Pops the current parameter definitions set."""
- self._param_def_block.pop()
-
- def mark_parameter_stored(self, target):
- """Marks a parameter in the current parameter definitions as stored.
- This will skip the enforced undefined checks.
- """
- if self._param_def_block:
- self._param_def_block[-1].discard(target)
-
- def push_context_reference(self, target):
- self._context_reference_stack.append(target)
-
- def pop_context_reference(self):
- self._context_reference_stack.pop()
-
- def get_context_ref(self):
- return self._context_reference_stack[-1]
-
- def get_resolve_func(self):
- target = self._context_reference_stack[-1]
+
+ def push_parameter_definitions(self, frame):
+ """Pushes all parameter targets from the given frame into a local
+ stack that permits tracking of yet to be assigned parameters. In
+ particular this enables the optimization from `visit_Name` to skip
+ undefined expressions for parameters in macros as macros can reference
+ otherwise unbound parameters.
+ """
+ self._param_def_block.append(frame.symbols.dump_param_targets())
+
+ def pop_parameter_definitions(self):
+ """Pops the current parameter definitions set."""
+ self._param_def_block.pop()
+
+ def mark_parameter_stored(self, target):
+ """Marks a parameter in the current parameter definitions as stored.
+ This will skip the enforced undefined checks.
+ """
+ if self._param_def_block:
+ self._param_def_block[-1].discard(target)
+
+ def push_context_reference(self, target):
+ self._context_reference_stack.append(target)
+
+ def pop_context_reference(self):
+ self._context_reference_stack.pop()
+
+ def get_context_ref(self):
+ return self._context_reference_stack[-1]
+
+ def get_resolve_func(self):
+ target = self._context_reference_stack[-1]
if target == "context":
return "resolve"
return "%s.resolve" % target
-
- def derive_context(self, frame):
+
+ def derive_context(self, frame):
return "%s.derived(%s)" % (
- self.get_context_ref(),
- self.dump_local_context(frame),
- )
-
- def parameter_is_undeclared(self, target):
- """Checks if a given target is an undeclared parameter."""
- if not self._param_def_block:
- return False
- return target in self._param_def_block[-1]
-
- def push_assign_tracking(self):
- """Pushes a new layer for assignment tracking."""
- self._assign_stack.append(set())
-
- def pop_assign_tracking(self, frame):
- """Pops the topmost level for assignment tracking and updates the
- context variables if necessary.
- """
- vars = self._assign_stack.pop()
- if not frame.toplevel or not vars:
- return
+ self.get_context_ref(),
+ self.dump_local_context(frame),
+ )
+
+ def parameter_is_undeclared(self, target):
+ """Checks if a given target is an undeclared parameter."""
+ if not self._param_def_block:
+ return False
+ return target in self._param_def_block[-1]
+
+ def push_assign_tracking(self):
+ """Pushes a new layer for assignment tracking."""
+ self._assign_stack.append(set())
+
+ def pop_assign_tracking(self, frame):
+ """Pops the topmost level for assignment tracking and updates the
+ context variables if necessary.
+ """
+ vars = self._assign_stack.pop()
+ if not frame.toplevel or not vars:
+ return
public_names = [x for x in vars if x[:1] != "_"]
- if len(vars) == 1:
- name = next(iter(vars))
- ref = frame.symbols.ref(name)
+ if len(vars) == 1:
+ name = next(iter(vars))
+ ref = frame.symbols.ref(name)
self.writeline("context.vars[%r] = %s" % (name, ref))
- else:
+ else:
self.writeline("context.vars.update({")
- for idx, name in enumerate(vars):
- if idx:
+ for idx, name in enumerate(vars):
+ if idx:
self.write(", ")
- ref = frame.symbols.ref(name)
+ ref = frame.symbols.ref(name)
self.write("%r: %s" % (name, ref))
self.write("})")
- if public_names:
- if len(public_names) == 1:
+ if public_names:
+ if len(public_names) == 1:
self.writeline("context.exported_vars.add(%r)" % public_names[0])
- else:
+ else:
self.writeline(
"context.exported_vars.update((%s))"
% ", ".join(imap(repr, public_names))
)
-
- # -- Statement Visitors
-
- def visit_Template(self, node, frame=None):
+
+ # -- Statement Visitors
+
+ def visit_Template(self, node, frame=None):
assert frame is None, "no root frame allowed"
- eval_ctx = EvalContext(self.environment, self.name)
-
+ eval_ctx = EvalContext(self.environment, self.name)
+
from .runtime import exported
-
+
self.writeline("from __future__ import %s" % ", ".join(code_features))
self.writeline("from jinja2.runtime import " + ", ".join(exported))
- if self.environment.is_async:
+ if self.environment.is_async:
self.writeline(
"from jinja2.asyncsupport import auto_await, "
"auto_aiter, AsyncLoopContext"
)
-
- # if we want a deferred initialization we cannot move the
- # environment into a local name
+
+ # if we want a deferred initialization we cannot move the
+ # environment into a local name
envenv = not self.defer_init and ", environment=environment" or ""
-
- # do we have an extends tag at all? If not, we can save some
- # overhead by just not processing any inheritance code.
- have_extends = node.find(nodes.Extends) is not None
-
- # find all blocks
- for block in node.find_all(nodes.Block):
- if block.name in self.blocks:
+
+ # do we have an extends tag at all? If not, we can save some
+ # overhead by just not processing any inheritance code.
+ have_extends = node.find(nodes.Extends) is not None
+
+ # find all blocks
+ for block in node.find_all(nodes.Block):
+ if block.name in self.blocks:
self.fail("block %r defined twice" % block.name, block.lineno)
- self.blocks[block.name] = block
-
- # find all imports and import them
- for import_ in node.find_all(nodes.ImportedName):
- if import_.importname not in self.import_aliases:
- imp = import_.importname
- self.import_aliases[imp] = alias = self.temporary_identifier()
+ self.blocks[block.name] = block
+
+ # find all imports and import them
+ for import_ in node.find_all(nodes.ImportedName):
+ if import_.importname not in self.import_aliases:
+ imp = import_.importname
+ self.import_aliases[imp] = alias = self.temporary_identifier()
if "." in imp:
module, obj = imp.rsplit(".", 1)
self.writeline("from %s import %s as %s" % (module, obj, alias))
- else:
+ else:
self.writeline("import %s as %s" % (imp, alias))
-
- # add the load name
+
+ # add the load name
self.writeline("name = %r" % self.name)
-
- # generate the root render function.
+
+ # generate the root render function.
self.writeline(
"%s(context, missing=missing%s):" % (self.func("root"), envenv), extra=1
)
- self.indent()
- self.write_commons()
-
- # process the root
- frame = Frame(eval_ctx)
+ self.indent()
+ self.write_commons()
+
+ # process the root
+ frame = Frame(eval_ctx)
if "self" in find_undeclared(node.body, ("self",)):
ref = frame.symbols.declare_parameter("self")
self.writeline("%s = TemplateReference(context)" % ref)
- frame.symbols.analyze_node(node)
- frame.toplevel = frame.rootlevel = True
- frame.require_output_check = have_extends and not self.has_known_extends
- if have_extends:
+ frame.symbols.analyze_node(node)
+ frame.toplevel = frame.rootlevel = True
+ frame.require_output_check = have_extends and not self.has_known_extends
+ if have_extends:
self.writeline("parent_template = None")
- self.enter_frame(frame)
- self.pull_dependencies(node.body)
- self.blockvisit(node.body, frame)
- self.leave_frame(frame, with_python_scope=True)
- self.outdent()
-
- # make sure that the parent root is called.
- if have_extends:
- if not self.has_known_extends:
- self.indent()
+ self.enter_frame(frame)
+ self.pull_dependencies(node.body)
+ self.blockvisit(node.body, frame)
+ self.leave_frame(frame, with_python_scope=True)
+ self.outdent()
+
+ # make sure that the parent root is called.
+ if have_extends:
+ if not self.has_known_extends:
+ self.indent()
self.writeline("if parent_template is not None:")
- self.indent()
- if supports_yield_from and not self.environment.is_async:
+ self.indent()
+ if supports_yield_from and not self.environment.is_async:
self.writeline("yield from parent_template.root_render_func(context)")
- else:
+ else:
self.writeline(
"%sfor event in parent_template."
"root_render_func(context):"
% (self.environment.is_async and "async " or "")
)
- self.indent()
+ self.indent()
self.writeline("yield event")
- self.outdent()
- self.outdent(1 + (not self.has_known_extends))
-
- # at this point we now have the blocks collected and can visit them too.
- for name, block in iteritems(self.blocks):
+ self.outdent()
+ self.outdent(1 + (not self.has_known_extends))
+
+ # at this point we now have the blocks collected and can visit them too.
+ for name, block in iteritems(self.blocks):
self.writeline(
"%s(context, missing=missing%s):"
% (self.func("block_" + name), envenv),
block,
1,
)
- self.indent()
- self.write_commons()
- # It's important that we do not make this frame a child of the
- # toplevel template. This would cause a variety of
- # interesting issues with identifier tracking.
- block_frame = Frame(eval_ctx)
+ self.indent()
+ self.write_commons()
+ # It's important that we do not make this frame a child of the
+ # toplevel template. This would cause a variety of
+ # interesting issues with identifier tracking.
+ block_frame = Frame(eval_ctx)
undeclared = find_undeclared(block.body, ("self", "super"))
if "self" in undeclared:
ref = block_frame.symbols.declare_parameter("self")
@@ -815,42 +815,42 @@ class CodeGenerator(NodeVisitor):
if "super" in undeclared:
ref = block_frame.symbols.declare_parameter("super")
self.writeline("%s = context.super(%r, block_%s)" % (ref, name, name))
- block_frame.symbols.analyze_node(block)
- block_frame.block = name
- self.enter_frame(block_frame)
- self.pull_dependencies(block.body)
- self.blockvisit(block.body, block_frame)
- self.leave_frame(block_frame, with_python_scope=True)
- self.outdent()
-
+ block_frame.symbols.analyze_node(block)
+ block_frame.block = name
+ self.enter_frame(block_frame)
+ self.pull_dependencies(block.body)
+ self.blockvisit(block.body, block_frame)
+ self.leave_frame(block_frame, with_python_scope=True)
+ self.outdent()
+
self.writeline(
"blocks = {%s}" % ", ".join("%r: block_%s" % (x, x) for x in self.blocks),
extra=1,
)
-
- # add a function that returns the debug info
+
+ # add a function that returns the debug info
self.writeline(
"debug_info = %r" % "&".join("%s=%s" % x for x in self.debug_info)
)
-
- def visit_Block(self, node, frame):
- """Call a block and register it for the template."""
- level = 0
- if frame.toplevel:
- # if we know that we are a child template, there is no need to
- # check if we are one
- if self.has_known_extends:
- return
- if self.extends_so_far > 0:
+
+ def visit_Block(self, node, frame):
+ """Call a block and register it for the template."""
+ level = 0
+ if frame.toplevel:
+ # if we know that we are a child template, there is no need to
+ # check if we are one
+ if self.has_known_extends:
+ return
+ if self.extends_so_far > 0:
self.writeline("if parent_template is None:")
- self.indent()
- level += 1
-
- if node.scoped:
- context = self.derive_context(frame)
- else:
- context = self.get_context_ref()
-
+ self.indent()
+ level += 1
+
+ if node.scoped:
+ context = self.derive_context(frame)
+ else:
+ context = self.get_context_ref()
+
if (
supports_yield_from
and not self.environment.is_async
@@ -859,132 +859,132 @@ class CodeGenerator(NodeVisitor):
self.writeline(
"yield from context.blocks[%r][0](%s)" % (node.name, context), node
)
- else:
+ else:
loop = self.environment.is_async and "async for" or "for"
self.writeline(
"%s event in context.blocks[%r][0](%s):" % (loop, node.name, context),
node,
)
- self.indent()
+ self.indent()
self.simple_write("event", frame)
- self.outdent()
-
- self.outdent(level)
-
- def visit_Extends(self, node, frame):
- """Calls the extender."""
- if not frame.toplevel:
+ self.outdent()
+
+ self.outdent(level)
+
+ def visit_Extends(self, node, frame):
+ """Calls the extender."""
+ if not frame.toplevel:
self.fail("cannot use extend from a non top-level scope", node.lineno)
-
- # if the number of extends statements in general is zero so
- # far, we don't have to add a check if something extended
- # the template before this one.
- if self.extends_so_far > 0:
-
- # if we have a known extends we just add a template runtime
- # error into the generated code. We could catch that at compile
- # time too, but i welcome it not to confuse users by throwing the
- # same error at different times just "because we can".
- if not self.has_known_extends:
+
+ # if the number of extends statements in general is zero so
+ # far, we don't have to add a check if something extended
+ # the template before this one.
+ if self.extends_so_far > 0:
+
+ # if we have a known extends we just add a template runtime
+ # error into the generated code. We could catch that at compile
+ # time too, but i welcome it not to confuse users by throwing the
+ # same error at different times just "because we can".
+ if not self.has_known_extends:
self.writeline("if parent_template is not None:")
- self.indent()
+ self.indent()
self.writeline("raise TemplateRuntimeError(%r)" % "extended multiple times")
-
- # if we have a known extends already we don't need that code here
- # as we know that the template execution will end here.
- if self.has_known_extends:
- raise CompilerExit()
- else:
- self.outdent()
-
+
+ # if we have a known extends already we don't need that code here
+ # as we know that the template execution will end here.
+ if self.has_known_extends:
+ raise CompilerExit()
+ else:
+ self.outdent()
+
self.writeline("parent_template = environment.get_template(", node)
- self.visit(node.template, frame)
+ self.visit(node.template, frame)
self.write(", %r)" % self.name)
self.writeline(
"for name, parent_block in parent_template.blocks.%s():" % dict_item_iter
)
- self.indent()
+ self.indent()
self.writeline("context.blocks.setdefault(name, []).append(parent_block)")
- self.outdent()
-
- # if this extends statement was in the root level we can take
- # advantage of that information and simplify the generated code
- # in the top level from this point onwards
- if frame.rootlevel:
- self.has_known_extends = True
-
- # and now we have one more
- self.extends_so_far += 1
-
- def visit_Include(self, node, frame):
- """Handles includes."""
- if node.ignore_missing:
+ self.outdent()
+
+ # if this extends statement was in the root level we can take
+ # advantage of that information and simplify the generated code
+ # in the top level from this point onwards
+ if frame.rootlevel:
+ self.has_known_extends = True
+
+ # and now we have one more
+ self.extends_so_far += 1
+
+ def visit_Include(self, node, frame):
+ """Handles includes."""
+ if node.ignore_missing:
self.writeline("try:")
- self.indent()
-
+ self.indent()
+
func_name = "get_or_select_template"
- if isinstance(node.template, nodes.Const):
- if isinstance(node.template.value, string_types):
+ if isinstance(node.template, nodes.Const):
+ if isinstance(node.template.value, string_types):
func_name = "get_template"
- elif isinstance(node.template.value, (tuple, list)):
+ elif isinstance(node.template.value, (tuple, list)):
func_name = "select_template"
- elif isinstance(node.template, (nodes.Tuple, nodes.List)):
+ elif isinstance(node.template, (nodes.Tuple, nodes.List)):
func_name = "select_template"
-
+
self.writeline("template = environment.%s(" % func_name, node)
- self.visit(node.template, frame)
+ self.visit(node.template, frame)
self.write(", %r)" % self.name)
- if node.ignore_missing:
- self.outdent()
+ if node.ignore_missing:
+ self.outdent()
self.writeline("except TemplateNotFound:")
- self.indent()
+ self.indent()
self.writeline("pass")
- self.outdent()
+ self.outdent()
self.writeline("else:")
- self.indent()
-
- skip_event_yield = False
- if node.with_context:
+ self.indent()
+
+ skip_event_yield = False
+ if node.with_context:
loop = self.environment.is_async and "async for" or "for"
self.writeline(
"%s event in template.root_render_func("
"template.new_context(context.get_all(), True, "
"%s)):" % (loop, self.dump_local_context(frame))
)
- elif self.environment.is_async:
+ elif self.environment.is_async:
self.writeline(
"for event in (await "
"template._get_default_module_async())"
"._body_stream:"
)
- else:
- if supports_yield_from:
+ else:
+ if supports_yield_from:
self.writeline("yield from template._get_default_module()._body_stream")
- skip_event_yield = True
- else:
+ skip_event_yield = True
+ else:
self.writeline(
"for event in template._get_default_module()._body_stream:"
)
-
- if not skip_event_yield:
- self.indent()
+
+ if not skip_event_yield:
+ self.indent()
self.simple_write("event", frame)
- self.outdent()
-
- if node.ignore_missing:
- self.outdent()
-
- def visit_Import(self, node, frame):
- """Visit regular imports."""
+ self.outdent()
+
+ if node.ignore_missing:
+ self.outdent()
+
+ def visit_Import(self, node, frame):
+ """Visit regular imports."""
self.writeline("%s = " % frame.symbols.ref(node.target), node)
- if frame.toplevel:
+ if frame.toplevel:
self.write("context.vars[%r] = " % node.target)
- if self.environment.is_async:
+ if self.environment.is_async:
self.write("await ")
self.write("environment.get_template(")
- self.visit(node.template, frame)
+ self.visit(node.template, frame)
self.write(", %r)." % self.name)
- if node.with_context:
+ if node.with_context:
self.write(
"make_module%s(context.get_all(), True, %s)"
% (
@@ -992,23 +992,23 @@ class CodeGenerator(NodeVisitor):
self.dump_local_context(frame),
)
)
- elif self.environment.is_async:
+ elif self.environment.is_async:
self.write("_get_default_module_async()")
- else:
+ else:
self.write("_get_default_module()")
if frame.toplevel and not node.target.startswith("_"):
self.writeline("context.exported_vars.discard(%r)" % node.target)
-
- def visit_FromImport(self, node, frame):
- """Visit named imports."""
- self.newline(node)
+
+ def visit_FromImport(self, node, frame):
+ """Visit named imports."""
+ self.newline(node)
self.write(
"included_template = %senvironment.get_template("
% (self.environment.is_async and "await " or "")
)
- self.visit(node.template, frame)
+ self.visit(node.template, frame)
self.write(", %r)." % self.name)
- if node.with_context:
+ if node.with_context:
self.write(
"make_module%s(context.get_all(), True, %s)"
% (
@@ -1016,24 +1016,24 @@ class CodeGenerator(NodeVisitor):
self.dump_local_context(frame),
)
)
- elif self.environment.is_async:
+ elif self.environment.is_async:
self.write("_get_default_module_async()")
- else:
+ else:
self.write("_get_default_module()")
-
- var_names = []
- discarded_names = []
- for name in node.names:
- if isinstance(name, tuple):
- name, alias = name
- else:
- alias = name
+
+ var_names = []
+ discarded_names = []
+ for name in node.names:
+ if isinstance(name, tuple):
+ name, alias = name
+ else:
+ alias = name
self.writeline(
"%s = getattr(included_template, "
"%r, missing)" % (frame.symbols.ref(alias), name)
)
self.writeline("if %s is missing:" % frame.symbols.ref(alias))
- self.indent()
+ self.indent()
self.writeline(
"%s = undefined(%r %% "
"included_template.__name__, "
@@ -1046,241 +1046,241 @@ class CodeGenerator(NodeVisitor):
name,
)
)
- self.outdent()
- if frame.toplevel:
- var_names.append(alias)
+ self.outdent()
+ if frame.toplevel:
+ var_names.append(alias)
if not alias.startswith("_"):
- discarded_names.append(alias)
-
- if var_names:
- if len(var_names) == 1:
- name = var_names[0]
+ discarded_names.append(alias)
+
+ if var_names:
+ if len(var_names) == 1:
+ name = var_names[0]
self.writeline(
"context.vars[%r] = %s" % (name, frame.symbols.ref(name))
)
- else:
+ else:
self.writeline(
"context.vars.update({%s})"
% ", ".join(
"%r: %s" % (name, frame.symbols.ref(name)) for name in var_names
)
)
- if discarded_names:
- if len(discarded_names) == 1:
+ if discarded_names:
+ if len(discarded_names) == 1:
self.writeline("context.exported_vars.discard(%r)" % discarded_names[0])
- else:
+ else:
self.writeline(
"context.exported_vars.difference_"
"update((%s))" % ", ".join(imap(repr, discarded_names))
)
-
- def visit_For(self, node, frame):
- loop_frame = frame.inner()
- test_frame = frame.inner()
- else_frame = frame.inner()
-
- # try to figure out if we have an extended loop. An extended loop
- # is necessary if the loop is in recursive mode if the special loop
- # variable is accessed in the body.
+
+ def visit_For(self, node, frame):
+ loop_frame = frame.inner()
+ test_frame = frame.inner()
+ else_frame = frame.inner()
+
+ # try to figure out if we have an extended loop. An extended loop
+ # is necessary if the loop is in recursive mode if the special loop
+ # variable is accessed in the body.
extended_loop = node.recursive or "loop" in find_undeclared(
node.iter_child_nodes(only=("body",)), ("loop",)
)
-
- loop_ref = None
- if extended_loop:
+
+ loop_ref = None
+ if extended_loop:
loop_ref = loop_frame.symbols.declare_parameter("loop")
-
+
loop_frame.symbols.analyze_node(node, for_branch="body")
- if node.else_:
+ if node.else_:
else_frame.symbols.analyze_node(node, for_branch="else")
-
- if node.test:
- loop_filter_func = self.temporary_identifier()
+
+ if node.test:
+ loop_filter_func = self.temporary_identifier()
test_frame.symbols.analyze_node(node, for_branch="test")
self.writeline("%s(fiter):" % self.func(loop_filter_func), node.test)
- self.indent()
- self.enter_frame(test_frame)
+ self.indent()
+ self.enter_frame(test_frame)
self.writeline(self.environment.is_async and "async for " or "for ")
- self.visit(node.target, loop_frame)
+ self.visit(node.target, loop_frame)
self.write(" in ")
self.write(self.environment.is_async and "auto_aiter(fiter)" or "fiter")
self.write(":")
- self.indent()
+ self.indent()
self.writeline("if ", node.test)
- self.visit(node.test, test_frame)
+ self.visit(node.test, test_frame)
self.write(":")
- self.indent()
+ self.indent()
self.writeline("yield ")
- self.visit(node.target, loop_frame)
- self.outdent(3)
- self.leave_frame(test_frame, with_python_scope=True)
-
- # if we don't have an recursive loop we have to find the shadowed
- # variables at that point. Because loops can be nested but the loop
- # variable is a special one we have to enforce aliasing for it.
- if node.recursive:
+ self.visit(node.target, loop_frame)
+ self.outdent(3)
+ self.leave_frame(test_frame, with_python_scope=True)
+
+ # if we don't have an recursive loop we have to find the shadowed
+ # variables at that point. Because loops can be nested but the loop
+ # variable is a special one we have to enforce aliasing for it.
+ if node.recursive:
self.writeline(
"%s(reciter, loop_render_func, depth=0):" % self.func("loop"), node
)
- self.indent()
- self.buffer(loop_frame)
-
- # Use the same buffer for the else frame
- else_frame.buffer = loop_frame.buffer
-
- # make sure the loop variable is a special one and raise a template
- # assertion error if a loop tries to write to loop
- if extended_loop:
+ self.indent()
+ self.buffer(loop_frame)
+
+ # Use the same buffer for the else frame
+ else_frame.buffer = loop_frame.buffer
+
+ # make sure the loop variable is a special one and raise a template
+ # assertion error if a loop tries to write to loop
+ if extended_loop:
self.writeline("%s = missing" % loop_ref)
-
- for name in node.find_all(nodes.Name):
+
+ for name in node.find_all(nodes.Name):
if name.ctx == "store" and name.name == "loop":
self.fail(
"Can't assign to special loop variable in for-loop target",
name.lineno,
)
-
- if node.else_:
- iteration_indicator = self.temporary_identifier()
+
+ if node.else_:
+ iteration_indicator = self.temporary_identifier()
self.writeline("%s = 1" % iteration_indicator)
-
+
self.writeline(self.environment.is_async and "async for " or "for ", node)
- self.visit(node.target, loop_frame)
- if extended_loop:
- if self.environment.is_async:
+ self.visit(node.target, loop_frame)
+ if extended_loop:
+ if self.environment.is_async:
self.write(", %s in AsyncLoopContext(" % loop_ref)
- else:
+ else:
self.write(", %s in LoopContext(" % loop_ref)
- else:
+ else:
self.write(" in ")
-
- if node.test:
+
+ if node.test:
self.write("%s(" % loop_filter_func)
- if node.recursive:
+ if node.recursive:
self.write("reciter")
- else:
- if self.environment.is_async and not extended_loop:
+ else:
+ if self.environment.is_async and not extended_loop:
self.write("auto_aiter(")
- self.visit(node.iter, frame)
- if self.environment.is_async and not extended_loop:
+ self.visit(node.iter, frame)
+ if self.environment.is_async and not extended_loop:
self.write(")")
- if node.test:
+ if node.test:
self.write(")")
-
- if node.recursive:
+
+ if node.recursive:
self.write(", undefined, loop_render_func, depth):")
- else:
+ else:
self.write(extended_loop and ", undefined):" or ":")
-
- self.indent()
- self.enter_frame(loop_frame)
-
- self.blockvisit(node.body, loop_frame)
- if node.else_:
+
+ self.indent()
+ self.enter_frame(loop_frame)
+
+ self.blockvisit(node.body, loop_frame)
+ if node.else_:
self.writeline("%s = 0" % iteration_indicator)
- self.outdent()
+ self.outdent()
self.leave_frame(
loop_frame, with_python_scope=node.recursive and not node.else_
)
-
- if node.else_:
+
+ if node.else_:
self.writeline("if %s:" % iteration_indicator)
- self.indent()
- self.enter_frame(else_frame)
- self.blockvisit(node.else_, else_frame)
- self.leave_frame(else_frame)
- self.outdent()
-
- # if the node was recursive we have to return the buffer contents
- # and start the iteration code
- if node.recursive:
- self.return_buffer_contents(loop_frame)
- self.outdent()
- self.start_write(frame, node)
- if self.environment.is_async:
+ self.indent()
+ self.enter_frame(else_frame)
+ self.blockvisit(node.else_, else_frame)
+ self.leave_frame(else_frame)
+ self.outdent()
+
+ # if the node was recursive we have to return the buffer contents
+ # and start the iteration code
+ if node.recursive:
+ self.return_buffer_contents(loop_frame)
+ self.outdent()
+ self.start_write(frame, node)
+ if self.environment.is_async:
self.write("await ")
self.write("loop(")
- if self.environment.is_async:
+ if self.environment.is_async:
self.write("auto_aiter(")
- self.visit(node.iter, frame)
- if self.environment.is_async:
+ self.visit(node.iter, frame)
+ if self.environment.is_async:
self.write(")")
self.write(", loop)")
- self.end_write(frame)
-
- def visit_If(self, node, frame):
- if_frame = frame.soft()
+ self.end_write(frame)
+
+ def visit_If(self, node, frame):
+ if_frame = frame.soft()
self.writeline("if ", node)
- self.visit(node.test, if_frame)
+ self.visit(node.test, if_frame)
self.write(":")
- self.indent()
- self.blockvisit(node.body, if_frame)
- self.outdent()
- for elif_ in node.elif_:
+ self.indent()
+ self.blockvisit(node.body, if_frame)
+ self.outdent()
+ for elif_ in node.elif_:
self.writeline("elif ", elif_)
- self.visit(elif_.test, if_frame)
+ self.visit(elif_.test, if_frame)
self.write(":")
- self.indent()
- self.blockvisit(elif_.body, if_frame)
- self.outdent()
- if node.else_:
+ self.indent()
+ self.blockvisit(elif_.body, if_frame)
+ self.outdent()
+ if node.else_:
self.writeline("else:")
- self.indent()
- self.blockvisit(node.else_, if_frame)
- self.outdent()
-
- def visit_Macro(self, node, frame):
- macro_frame, macro_ref = self.macro_body(node, frame)
- self.newline()
- if frame.toplevel:
+ self.indent()
+ self.blockvisit(node.else_, if_frame)
+ self.outdent()
+
+ def visit_Macro(self, node, frame):
+ macro_frame, macro_ref = self.macro_body(node, frame)
+ self.newline()
+ if frame.toplevel:
if not node.name.startswith("_"):
self.write("context.exported_vars.add(%r)" % node.name)
self.writeline("context.vars[%r] = " % node.name)
self.write("%s = " % frame.symbols.ref(node.name))
- self.macro_def(macro_ref, macro_frame)
-
- def visit_CallBlock(self, node, frame):
- call_frame, macro_ref = self.macro_body(node, frame)
+ self.macro_def(macro_ref, macro_frame)
+
+ def visit_CallBlock(self, node, frame):
+ call_frame, macro_ref = self.macro_body(node, frame)
self.writeline("caller = ")
- self.macro_def(macro_ref, call_frame)
- self.start_write(frame, node)
- self.visit_Call(node.call, frame, forward_caller=True)
- self.end_write(frame)
-
- def visit_FilterBlock(self, node, frame):
- filter_frame = frame.inner()
- filter_frame.symbols.analyze_node(node)
- self.enter_frame(filter_frame)
- self.buffer(filter_frame)
- self.blockvisit(node.body, filter_frame)
- self.start_write(frame, node)
- self.visit_Filter(node.filter, filter_frame)
- self.end_write(frame)
- self.leave_frame(filter_frame)
-
- def visit_With(self, node, frame):
- with_frame = frame.inner()
- with_frame.symbols.analyze_node(node)
- self.enter_frame(with_frame)
+ self.macro_def(macro_ref, call_frame)
+ self.start_write(frame, node)
+ self.visit_Call(node.call, frame, forward_caller=True)
+ self.end_write(frame)
+
+ def visit_FilterBlock(self, node, frame):
+ filter_frame = frame.inner()
+ filter_frame.symbols.analyze_node(node)
+ self.enter_frame(filter_frame)
+ self.buffer(filter_frame)
+ self.blockvisit(node.body, filter_frame)
+ self.start_write(frame, node)
+ self.visit_Filter(node.filter, filter_frame)
+ self.end_write(frame)
+ self.leave_frame(filter_frame)
+
+ def visit_With(self, node, frame):
+ with_frame = frame.inner()
+ with_frame.symbols.analyze_node(node)
+ self.enter_frame(with_frame)
for target, expr in izip(node.targets, node.values):
- self.newline()
- self.visit(target, with_frame)
+ self.newline()
+ self.visit(target, with_frame)
self.write(" = ")
- self.visit(expr, frame)
- self.blockvisit(node.body, with_frame)
- self.leave_frame(with_frame)
-
- def visit_ExprStmt(self, node, frame):
- self.newline(node)
- self.visit(node.node, frame)
-
+ self.visit(expr, frame)
+ self.blockvisit(node.body, with_frame)
+ self.leave_frame(with_frame)
+
+ def visit_ExprStmt(self, node, frame):
+ self.newline(node)
+ self.visit(node.node, frame)
+
_FinalizeInfo = namedtuple("_FinalizeInfo", ("const", "src"))
#: The default finalize function if the environment isn't configured
#: with one. Or if the environment has one, this is called on that
#: function's output for constants.
_default_finalize = text_type
_finalize = None
-
+
def _make_finalize(self):
"""Build the finalize function to be used on constants and at
runtime. Cached so it's only created once for all output nodes.
@@ -1300,7 +1300,7 @@ class CodeGenerator(NodeVisitor):
finalize = default = self._default_finalize
src = None
- if self.environment.finalize:
+ if self.environment.finalize:
src = "environment.finalize("
env_finalize = self.environment.finalize
@@ -1356,9 +1356,9 @@ class CodeGenerator(NodeVisitor):
self.write("(escape if context.eval_ctx.autoescape else to_string)(")
elif frame.eval_ctx.autoescape:
self.write("escape(")
- else:
+ else:
self.write("to_string(")
-
+
if finalize.src is not None:
self.write(finalize.src)
@@ -1373,22 +1373,22 @@ class CodeGenerator(NodeVisitor):
def visit_Output(self, node, frame):
# If an extends is active, don't render outside a block.
- if frame.require_output_check:
+ if frame.require_output_check:
# A top-level extends is known to exist at compile time.
if self.has_known_extends:
return
self.writeline("if parent_template is None:")
- self.indent()
-
+ self.indent()
+
finalize = self._make_finalize()
- body = []
+ body = []
# Evaluate constants at compile time if possible. Each item in
# body will be either a list of static data or a node to be
# evaluated at runtime.
- for child in node.nodes:
- try:
+ for child in node.nodes:
+ try:
if not (
# If the finalize function requires runtime context,
# constants can't be evaluated at compile time.
@@ -1397,21 +1397,21 @@ class CodeGenerator(NodeVisitor):
# finalized anyway.
or isinstance(child, nodes.TemplateData)
):
- raise nodes.Impossible()
+ raise nodes.Impossible()
const = self._output_child_to_const(child, frame, finalize)
except (nodes.Impossible, Exception):
# The node was not constant and needs to be evaluated at
# runtime. Or another error was raised, which is easier
# to debug at runtime.
- body.append(child)
- continue
-
- if body and isinstance(body[-1], list):
- body[-1].append(const)
- else:
- body.append([const])
-
+ body.append(child)
+ continue
+
+ if body and isinstance(body[-1], list):
+ body[-1].append(const)
+ else:
+ body.append([const])
+
if frame.buffer is not None:
if len(body) == 1:
self.writeline("%s.append(" % frame.buffer)
@@ -1427,14 +1427,14 @@ class CodeGenerator(NodeVisitor):
if frame.buffer is None:
self.writeline("yield " + val)
- else:
+ else:
self.writeline(val + ",")
else:
if frame.buffer is None:
self.writeline("yield ", item)
- else:
+ else:
self.newline(item)
-
+
# A node to be evaluated at runtime.
self._output_child_pre(item, frame, finalize)
self.visit(item, frame)
@@ -1444,55 +1444,55 @@ class CodeGenerator(NodeVisitor):
self.write(",")
if frame.buffer is not None:
- self.outdent()
+ self.outdent()
self.writeline(")" if len(body) == 1 else "))")
-
+
if frame.require_output_check:
- self.outdent()
-
- def visit_Assign(self, node, frame):
- self.push_assign_tracking()
- self.newline(node)
- self.visit(node.target, frame)
+ self.outdent()
+
+ def visit_Assign(self, node, frame):
+ self.push_assign_tracking()
+ self.newline(node)
+ self.visit(node.target, frame)
self.write(" = ")
- self.visit(node.node, frame)
- self.pop_assign_tracking(frame)
-
- def visit_AssignBlock(self, node, frame):
- self.push_assign_tracking()
- block_frame = frame.inner()
- # This is a special case. Since a set block always captures we
- # will disable output checks. This way one can use set blocks
- # toplevel even in extended templates.
- block_frame.require_output_check = False
- block_frame.symbols.analyze_node(node)
- self.enter_frame(block_frame)
- self.buffer(block_frame)
- self.blockvisit(node.body, block_frame)
- self.newline(node)
- self.visit(node.target, frame)
+ self.visit(node.node, frame)
+ self.pop_assign_tracking(frame)
+
+ def visit_AssignBlock(self, node, frame):
+ self.push_assign_tracking()
+ block_frame = frame.inner()
+ # This is a special case. Since a set block always captures we
+ # will disable output checks. This way one can use set blocks
+ # toplevel even in extended templates.
+ block_frame.require_output_check = False
+ block_frame.symbols.analyze_node(node)
+ self.enter_frame(block_frame)
+ self.buffer(block_frame)
+ self.blockvisit(node.body, block_frame)
+ self.newline(node)
+ self.visit(node.target, frame)
self.write(" = (Markup if context.eval_ctx.autoescape else identity)(")
- if node.filter is not None:
- self.visit_Filter(node.filter, block_frame)
- else:
+ if node.filter is not None:
+ self.visit_Filter(node.filter, block_frame)
+ else:
self.write("concat(%s)" % block_frame.buffer)
self.write(")")
- self.pop_assign_tracking(frame)
- self.leave_frame(block_frame)
-
- # -- Expression Visitors
-
- def visit_Name(self, node, frame):
+ self.pop_assign_tracking(frame)
+ self.leave_frame(block_frame)
+
+ # -- Expression Visitors
+
+ def visit_Name(self, node, frame):
if node.ctx == "store" and frame.toplevel:
- if self._assign_stack:
- self._assign_stack[-1].add(node.name)
- ref = frame.symbols.ref(node.name)
-
- # If we are looking up a variable we might have to deal with the
- # case where it's undefined. We can skip that case if the load
- # instruction indicates a parameter which are always defined.
+ if self._assign_stack:
+ self._assign_stack[-1].add(node.name)
+ ref = frame.symbols.ref(node.name)
+
+ # If we are looking up a variable we might have to deal with the
+ # case where it's undefined. We can skip that case if the load
+ # instruction indicates a parameter which are always defined.
if node.ctx == "load":
- load = frame.symbols.find_load(ref)
+ load = frame.symbols.find_load(ref)
if not (
load is not None
and load[0] == VAR_LOAD_PARAMETER
@@ -1502,102 +1502,102 @@ class CodeGenerator(NodeVisitor):
"(undefined(name=%r) if %s is missing else %s)"
% (node.name, ref, ref)
)
- return
-
- self.write(ref)
-
- def visit_NSRef(self, node, frame):
- # NSRefs can only be used to store values; since they use the normal
- # `foo.bar` notation they will be parsed as a normal attribute access
- # when used anywhere but in a `set` context
- ref = frame.symbols.ref(node.name)
+ return
+
+ self.write(ref)
+
+ def visit_NSRef(self, node, frame):
+ # NSRefs can only be used to store values; since they use the normal
+ # `foo.bar` notation they will be parsed as a normal attribute access
+ # when used anywhere but in a `set` context
+ ref = frame.symbols.ref(node.name)
self.writeline("if not isinstance(%s, Namespace):" % ref)
- self.indent()
+ self.indent()
self.writeline(
"raise TemplateRuntimeError(%r)"
% "cannot assign attribute on non-namespace object"
)
- self.outdent()
+ self.outdent()
self.writeline("%s[%r]" % (ref, node.attr))
-
- def visit_Const(self, node, frame):
- val = node.as_const(frame.eval_ctx)
- if isinstance(val, float):
- self.write(str(val))
- else:
- self.write(repr(val))
-
- def visit_TemplateData(self, node, frame):
- try:
- self.write(repr(node.as_const(frame.eval_ctx)))
- except nodes.Impossible:
+
+ def visit_Const(self, node, frame):
+ val = node.as_const(frame.eval_ctx)
+ if isinstance(val, float):
+ self.write(str(val))
+ else:
+ self.write(repr(val))
+
+ def visit_TemplateData(self, node, frame):
+ try:
+ self.write(repr(node.as_const(frame.eval_ctx)))
+ except nodes.Impossible:
self.write(
"(Markup if context.eval_ctx.autoescape else identity)(%r)" % node.data
)
-
- def visit_Tuple(self, node, frame):
+
+ def visit_Tuple(self, node, frame):
self.write("(")
- idx = -1
- for idx, item in enumerate(node.items):
- if idx:
+ idx = -1
+ for idx, item in enumerate(node.items):
+ if idx:
self.write(", ")
- self.visit(item, frame)
+ self.visit(item, frame)
self.write(idx == 0 and ",)" or ")")
-
- def visit_List(self, node, frame):
+
+ def visit_List(self, node, frame):
self.write("[")
- for idx, item in enumerate(node.items):
- if idx:
+ for idx, item in enumerate(node.items):
+ if idx:
self.write(", ")
- self.visit(item, frame)
+ self.visit(item, frame)
self.write("]")
-
- def visit_Dict(self, node, frame):
+
+ def visit_Dict(self, node, frame):
self.write("{")
- for idx, item in enumerate(node.items):
- if idx:
+ for idx, item in enumerate(node.items):
+ if idx:
self.write(", ")
- self.visit(item.key, frame)
+ self.visit(item.key, frame)
self.write(": ")
- self.visit(item.value, frame)
+ self.visit(item.value, frame)
self.write("}")
-
+
def binop(operator, interceptable=True): # noqa: B902
- @optimizeconst
- def visitor(self, node, frame):
+ @optimizeconst
+ def visitor(self, node, frame):
if (
self.environment.sandboxed
and operator in self.environment.intercepted_binops
):
self.write("environment.call_binop(context, %r, " % operator)
- self.visit(node.left, frame)
+ self.visit(node.left, frame)
self.write(", ")
- self.visit(node.right, frame)
- else:
+ self.visit(node.right, frame)
+ else:
self.write("(")
- self.visit(node.left, frame)
+ self.visit(node.left, frame)
self.write(" %s " % operator)
- self.visit(node.right, frame)
+ self.visit(node.right, frame)
self.write(")")
- return visitor
-
+ return visitor
+
def uaop(operator, interceptable=True): # noqa: B902
- @optimizeconst
- def visitor(self, node, frame):
+ @optimizeconst
+ def visitor(self, node, frame):
if (
self.environment.sandboxed
and operator in self.environment.intercepted_unops
):
self.write("environment.call_unop(context, %r, " % operator)
- self.visit(node.node, frame)
- else:
+ self.visit(node.node, frame)
+ else:
self.write("(" + operator)
- self.visit(node.node, frame)
+ self.visit(node.node, frame)
self.write(")")
- return visitor
-
+ return visitor
+
visit_Add = binop("+")
visit_Sub = binop("-")
visit_Mul = binop("*")
@@ -1610,84 +1610,84 @@ class CodeGenerator(NodeVisitor):
visit_Pos = uaop("+")
visit_Neg = uaop("-")
visit_Not = uaop("not ", interceptable=False)
- del binop, uaop
-
- @optimizeconst
- def visit_Concat(self, node, frame):
- if frame.eval_ctx.volatile:
+ del binop, uaop
+
+ @optimizeconst
+ def visit_Concat(self, node, frame):
+ if frame.eval_ctx.volatile:
func_name = "(context.eval_ctx.volatile and markup_join or unicode_join)"
- elif frame.eval_ctx.autoescape:
+ elif frame.eval_ctx.autoescape:
func_name = "markup_join"
- else:
+ else:
func_name = "unicode_join"
self.write("%s((" % func_name)
- for arg in node.nodes:
- self.visit(arg, frame)
+ for arg in node.nodes:
+ self.visit(arg, frame)
self.write(", ")
self.write("))")
-
- @optimizeconst
- def visit_Compare(self, node, frame):
+
+ @optimizeconst
+ def visit_Compare(self, node, frame):
self.write("(")
- self.visit(node.expr, frame)
- for op in node.ops:
- self.visit(op, frame)
+ self.visit(node.expr, frame)
+ for op in node.ops:
+ self.visit(op, frame)
self.write(")")
-
- def visit_Operand(self, node, frame):
+
+ def visit_Operand(self, node, frame):
self.write(" %s " % operators[node.op])
- self.visit(node.expr, frame)
-
- @optimizeconst
- def visit_Getattr(self, node, frame):
+ self.visit(node.expr, frame)
+
+ @optimizeconst
+ def visit_Getattr(self, node, frame):
if self.environment.is_async:
self.write("(await auto_await(")
self.write("environment.getattr(")
- self.visit(node.node, frame)
+ self.visit(node.node, frame)
self.write(", %r)" % node.attr)
-
+
if self.environment.is_async:
self.write("))")
- @optimizeconst
- def visit_Getitem(self, node, frame):
- # slices bypass the environment getitem method.
- if isinstance(node.arg, nodes.Slice):
- self.visit(node.node, frame)
+ @optimizeconst
+ def visit_Getitem(self, node, frame):
+ # slices bypass the environment getitem method.
+ if isinstance(node.arg, nodes.Slice):
+ self.visit(node.node, frame)
self.write("[")
- self.visit(node.arg, frame)
+ self.visit(node.arg, frame)
self.write("]")
- else:
+ else:
if self.environment.is_async:
self.write("(await auto_await(")
self.write("environment.getitem(")
- self.visit(node.node, frame)
+ self.visit(node.node, frame)
self.write(", ")
- self.visit(node.arg, frame)
+ self.visit(node.arg, frame)
self.write(")")
-
+
if self.environment.is_async:
self.write("))")
- def visit_Slice(self, node, frame):
- if node.start is not None:
- self.visit(node.start, frame)
+ def visit_Slice(self, node, frame):
+ if node.start is not None:
+ self.visit(node.start, frame)
self.write(":")
- if node.stop is not None:
- self.visit(node.stop, frame)
- if node.step is not None:
+ if node.stop is not None:
+ self.visit(node.stop, frame)
+ if node.step is not None:
self.write(":")
- self.visit(node.step, frame)
-
- @optimizeconst
- def visit_Filter(self, node, frame):
- if self.environment.is_async:
+ self.visit(node.step, frame)
+
+ @optimizeconst
+ def visit_Filter(self, node, frame):
+ if self.environment.is_async:
self.write("await auto_await(")
self.write(self.filters[node.name] + "(")
- func = self.environment.filters.get(node.name)
- if func is None:
+ func = self.environment.filters.get(node.name)
+ if func is None:
self.fail("no filter named %r" % node.name, node.lineno)
if getattr(func, "contextfilter", False) is True:
self.write("context, ")
@@ -1695,39 +1695,39 @@ class CodeGenerator(NodeVisitor):
self.write("context.eval_ctx, ")
elif getattr(func, "environmentfilter", False) is True:
self.write("environment, ")
-
- # if the filter node is None we are inside a filter block
- # and want to write to the current buffer
- if node.node is not None:
- self.visit(node.node, frame)
- elif frame.eval_ctx.volatile:
+
+ # if the filter node is None we are inside a filter block
+ # and want to write to the current buffer
+ if node.node is not None:
+ self.visit(node.node, frame)
+ elif frame.eval_ctx.volatile:
self.write(
"(context.eval_ctx.autoescape and"
" Markup(concat(%s)) or concat(%s))" % (frame.buffer, frame.buffer)
)
- elif frame.eval_ctx.autoescape:
+ elif frame.eval_ctx.autoescape:
self.write("Markup(concat(%s))" % frame.buffer)
- else:
+ else:
self.write("concat(%s)" % frame.buffer)
- self.signature(node, frame)
+ self.signature(node, frame)
self.write(")")
- if self.environment.is_async:
+ if self.environment.is_async:
self.write(")")
-
- @optimizeconst
- def visit_Test(self, node, frame):
+
+ @optimizeconst
+ def visit_Test(self, node, frame):
self.write(self.tests[node.name] + "(")
- if node.name not in self.environment.tests:
+ if node.name not in self.environment.tests:
self.fail("no test named %r" % node.name, node.lineno)
- self.visit(node.node, frame)
- self.signature(node, frame)
+ self.visit(node.node, frame)
+ self.signature(node, frame)
self.write(")")
-
- @optimizeconst
- def visit_CondExpr(self, node, frame):
- def write_expr2():
- if node.expr2 is not None:
- return self.visit(node.expr2, frame)
+
+ @optimizeconst
+ def visit_CondExpr(self, node, frame):
+ def write_expr2():
+ if node.expr2 is not None:
+ return self.visit(node.expr2, frame)
self.write(
"cond_expr_undefined(%r)"
% (
@@ -1736,108 +1736,108 @@ class CodeGenerator(NodeVisitor):
"no else section was defined." % self.position(node)
)
)
-
+
self.write("(")
- self.visit(node.expr1, frame)
+ self.visit(node.expr1, frame)
self.write(" if ")
- self.visit(node.test, frame)
+ self.visit(node.test, frame)
self.write(" else ")
- write_expr2()
+ write_expr2()
self.write(")")
-
- @optimizeconst
- def visit_Call(self, node, frame, forward_caller=False):
- if self.environment.is_async:
+
+ @optimizeconst
+ def visit_Call(self, node, frame, forward_caller=False):
+ if self.environment.is_async:
self.write("await auto_await(")
- if self.environment.sandboxed:
+ if self.environment.sandboxed:
self.write("environment.call(context, ")
- else:
+ else:
self.write("context.call(")
- self.visit(node.node, frame)
+ self.visit(node.node, frame)
extra_kwargs = forward_caller and {"caller": "caller"} or None
- self.signature(node, frame, extra_kwargs)
+ self.signature(node, frame, extra_kwargs)
self.write(")")
- if self.environment.is_async:
+ if self.environment.is_async:
self.write(")")
-
- def visit_Keyword(self, node, frame):
+
+ def visit_Keyword(self, node, frame):
self.write(node.key + "=")
- self.visit(node.value, frame)
-
- # -- Unused nodes for extensions
-
- def visit_MarkSafe(self, node, frame):
+ self.visit(node.value, frame)
+
+ # -- Unused nodes for extensions
+
+ def visit_MarkSafe(self, node, frame):
self.write("Markup(")
- self.visit(node.expr, frame)
+ self.visit(node.expr, frame)
self.write(")")
-
- def visit_MarkSafeIfAutoescape(self, node, frame):
+
+ def visit_MarkSafeIfAutoescape(self, node, frame):
self.write("(context.eval_ctx.autoescape and Markup or identity)(")
- self.visit(node.expr, frame)
+ self.visit(node.expr, frame)
self.write(")")
-
- def visit_EnvironmentAttribute(self, node, frame):
+
+ def visit_EnvironmentAttribute(self, node, frame):
self.write("environment." + node.name)
-
- def visit_ExtensionAttribute(self, node, frame):
+
+ def visit_ExtensionAttribute(self, node, frame):
self.write("environment.extensions[%r].%s" % (node.identifier, node.name))
-
- def visit_ImportedName(self, node, frame):
- self.write(self.import_aliases[node.importname])
-
- def visit_InternalName(self, node, frame):
- self.write(node.name)
-
- def visit_ContextReference(self, node, frame):
+
+ def visit_ImportedName(self, node, frame):
+ self.write(self.import_aliases[node.importname])
+
+ def visit_InternalName(self, node, frame):
+ self.write(node.name)
+
+ def visit_ContextReference(self, node, frame):
self.write("context")
-
+
def visit_DerivedContextReference(self, node, frame):
self.write(self.derive_context(frame))
- def visit_Continue(self, node, frame):
+ def visit_Continue(self, node, frame):
self.writeline("continue", node)
-
- def visit_Break(self, node, frame):
+
+ def visit_Break(self, node, frame):
self.writeline("break", node)
-
- def visit_Scope(self, node, frame):
- scope_frame = frame.inner()
- scope_frame.symbols.analyze_node(node)
- self.enter_frame(scope_frame)
- self.blockvisit(node.body, scope_frame)
- self.leave_frame(scope_frame)
-
- def visit_OverlayScope(self, node, frame):
- ctx = self.temporary_identifier()
+
+ def visit_Scope(self, node, frame):
+ scope_frame = frame.inner()
+ scope_frame.symbols.analyze_node(node)
+ self.enter_frame(scope_frame)
+ self.blockvisit(node.body, scope_frame)
+ self.leave_frame(scope_frame)
+
+ def visit_OverlayScope(self, node, frame):
+ ctx = self.temporary_identifier()
self.writeline("%s = %s" % (ctx, self.derive_context(frame)))
self.writeline("%s.vars = " % ctx)
- self.visit(node.context, frame)
- self.push_context_reference(ctx)
-
- scope_frame = frame.inner(isolated=True)
- scope_frame.symbols.analyze_node(node)
- self.enter_frame(scope_frame)
- self.blockvisit(node.body, scope_frame)
- self.leave_frame(scope_frame)
- self.pop_context_reference()
-
- def visit_EvalContextModifier(self, node, frame):
- for keyword in node.options:
+ self.visit(node.context, frame)
+ self.push_context_reference(ctx)
+
+ scope_frame = frame.inner(isolated=True)
+ scope_frame.symbols.analyze_node(node)
+ self.enter_frame(scope_frame)
+ self.blockvisit(node.body, scope_frame)
+ self.leave_frame(scope_frame)
+ self.pop_context_reference()
+
+ def visit_EvalContextModifier(self, node, frame):
+ for keyword in node.options:
self.writeline("context.eval_ctx.%s = " % keyword.key)
- self.visit(keyword.value, frame)
- try:
- val = keyword.value.as_const(frame.eval_ctx)
- except nodes.Impossible:
- frame.eval_ctx.volatile = True
- else:
- setattr(frame.eval_ctx, keyword.key, val)
-
- def visit_ScopedEvalContextModifier(self, node, frame):
- old_ctx_name = self.temporary_identifier()
- saved_ctx = frame.eval_ctx.save()
+ self.visit(keyword.value, frame)
+ try:
+ val = keyword.value.as_const(frame.eval_ctx)
+ except nodes.Impossible:
+ frame.eval_ctx.volatile = True
+ else:
+ setattr(frame.eval_ctx, keyword.key, val)
+
+ def visit_ScopedEvalContextModifier(self, node, frame):
+ old_ctx_name = self.temporary_identifier()
+ saved_ctx = frame.eval_ctx.save()
self.writeline("%s = context.eval_ctx.save()" % old_ctx_name)
- self.visit_EvalContextModifier(node, frame)
- for child in node.body:
- self.visit(child, frame)
- frame.eval_ctx.revert(saved_ctx)
+ self.visit_EvalContextModifier(node, frame)
+ for child in node.body:
+ self.visit(child, frame)
+ frame.eval_ctx.revert(saved_ctx)
self.writeline("context.eval_ctx.revert(%s)" % old_ctx_name)
diff --git a/contrib/python/Jinja2/py2/jinja2/constants.py b/contrib/python/Jinja2/py2/jinja2/constants.py
index bf7f2ca721..1c39d8cf27 100644
--- a/contrib/python/Jinja2/py2/jinja2/constants.py
+++ b/contrib/python/Jinja2/py2/jinja2/constants.py
@@ -1,21 +1,21 @@
-# -*- coding: utf-8 -*-
-#: list of lorem ipsum words used by the lipsum() helper function
+# -*- coding: utf-8 -*-
+#: list of lorem ipsum words used by the lipsum() helper function
LOREM_IPSUM_WORDS = u"""\
-a ac accumsan ad adipiscing aenean aliquam aliquet amet ante aptent arcu at
-auctor augue bibendum blandit class commodo condimentum congue consectetuer
-consequat conubia convallis cras cubilia cum curabitur curae cursus dapibus
-diam dictum dictumst dignissim dis dolor donec dui duis egestas eget eleifend
-elementum elit enim erat eros est et etiam eu euismod facilisi facilisis fames
-faucibus felis fermentum feugiat fringilla fusce gravida habitant habitasse hac
-hendrerit hymenaeos iaculis id imperdiet in inceptos integer interdum ipsum
-justo lacinia lacus laoreet lectus leo libero ligula litora lobortis lorem
-luctus maecenas magna magnis malesuada massa mattis mauris metus mi molestie
-mollis montes morbi mus nam nascetur natoque nec neque netus nibh nisi nisl non
-nonummy nostra nulla nullam nunc odio orci ornare parturient pede pellentesque
-penatibus per pharetra phasellus placerat platea porta porttitor posuere
-potenti praesent pretium primis proin pulvinar purus quam quis quisque rhoncus
-ridiculus risus rutrum sagittis sapien scelerisque sed sem semper senectus sit
-sociis sociosqu sodales sollicitudin suscipit suspendisse taciti tellus tempor
-tempus tincidunt torquent tortor tristique turpis ullamcorper ultrices
-ultricies urna ut varius vehicula vel velit venenatis vestibulum vitae vivamus
+a ac accumsan ad adipiscing aenean aliquam aliquet amet ante aptent arcu at
+auctor augue bibendum blandit class commodo condimentum congue consectetuer
+consequat conubia convallis cras cubilia cum curabitur curae cursus dapibus
+diam dictum dictumst dignissim dis dolor donec dui duis egestas eget eleifend
+elementum elit enim erat eros est et etiam eu euismod facilisi facilisis fames
+faucibus felis fermentum feugiat fringilla fusce gravida habitant habitasse hac
+hendrerit hymenaeos iaculis id imperdiet in inceptos integer interdum ipsum
+justo lacinia lacus laoreet lectus leo libero ligula litora lobortis lorem
+luctus maecenas magna magnis malesuada massa mattis mauris metus mi molestie
+mollis montes morbi mus nam nascetur natoque nec neque netus nibh nisi nisl non
+nonummy nostra nulla nullam nunc odio orci ornare parturient pede pellentesque
+penatibus per pharetra phasellus placerat platea porta porttitor posuere
+potenti praesent pretium primis proin pulvinar purus quam quis quisque rhoncus
+ridiculus risus rutrum sagittis sapien scelerisque sed sem semper senectus sit
+sociis sociosqu sodales sollicitudin suscipit suspendisse taciti tellus tempor
+tempus tincidunt torquent tortor tristique turpis ullamcorper ultrices
+ultricies urna ut varius vehicula vel velit venenatis vestibulum vitae vivamus
viverra volutpat vulputate"""
diff --git a/contrib/python/Jinja2/py2/jinja2/debug.py b/contrib/python/Jinja2/py2/jinja2/debug.py
index 5d8aec31d0..70f7f66c64 100644
--- a/contrib/python/Jinja2/py2/jinja2/debug.py
+++ b/contrib/python/Jinja2/py2/jinja2/debug.py
@@ -1,19 +1,19 @@
-import sys
+import sys
from types import CodeType
-
+
from . import TemplateSyntaxError
from ._compat import PYPY
from .utils import internal_code
from .utils import missing
-
-
+
+
def rewrite_traceback_stack(source=None):
"""Rewrite the current exception to replace any tracebacks from
within compiled template code with tracebacks that look like they
came from the template source.
-
+
This must be called within an ``except`` block.
-
+
:param exc_info: A :meth:`sys.exc_info` tuple. If not provided,
the current ``exc_info`` is used.
:param source: For ``TemplateSyntaxError``, the original source if
@@ -21,18 +21,18 @@ def rewrite_traceback_stack(source=None):
:return: A :meth:`sys.exc_info` tuple that can be re-raised.
"""
exc_type, exc_value, tb = sys.exc_info()
-
+
if isinstance(exc_value, TemplateSyntaxError) and not exc_value.translated:
exc_value.translated = True
exc_value.source = source
-
+
try:
# Remove the old traceback on Python 3, otherwise the frames
# from the compiler still show up.
exc_value.with_traceback(None)
except AttributeError:
pass
-
+
# Outside of runtime, so the frame isn't executing template
# code, but it still needs to point at the template.
tb = fake_traceback(
@@ -41,9 +41,9 @@ def rewrite_traceback_stack(source=None):
else:
# Skip the frame for the render function.
tb = tb.tb_next
-
+
stack = []
-
+
# Build the stack of traceback objects, replacing any in template
# code with the source file and line information.
while tb is not None:
@@ -52,33 +52,33 @@ def rewrite_traceback_stack(source=None):
if tb.tb_frame.f_code in internal_code:
tb = tb.tb_next
continue
-
+
template = tb.tb_frame.f_globals.get("__jinja_template__")
-
+
if template is not None:
lineno = template.get_corresponding_lineno(tb.tb_lineno)
fake_tb = fake_traceback(exc_value, tb, template.filename, lineno)
stack.append(fake_tb)
- else:
+ else:
stack.append(tb)
-
+
tb = tb.tb_next
-
+
tb_next = None
-
+
# Assign tb_next in reverse to avoid circular references.
for tb in reversed(stack):
tb_next = tb_set_next(tb, tb_next)
-
+
return exc_type, exc_value, tb_next
-
-
+
+
def fake_traceback(exc_value, tb, filename, lineno):
"""Produce a new traceback object that looks like it came from the
template source instead of the compiled code. The filename, line
number, and location name will point to the template, and the local
variables will be the current template context.
-
+
:param exc_value: The original exception to be re-raised to create
the new traceback.
:param tb: The original traceback to get the local variables and
@@ -91,9 +91,9 @@ def fake_traceback(exc_value, tb, filename, lineno):
# available at that point in the template.
locals = get_template_locals(tb.tb_frame.f_locals)
locals.pop("__jinja_exception__", None)
- else:
+ else:
locals = {}
-
+
globals = {
"__name__": filename,
"__file__": filename,
@@ -101,25 +101,25 @@ def fake_traceback(exc_value, tb, filename, lineno):
}
# Raise an exception at the correct line number.
code = compile("\n" * (lineno - 1) + "raise __jinja_exception__", filename, "exec")
-
+
# Build a new code object that points to the template file and
# replaces the location with a block name.
try:
location = "template"
-
+
if tb is not None:
function = tb.tb_frame.f_code.co_name
-
+
if function == "root":
location = "top-level template code"
elif function.startswith("block_"):
location = 'block "%s"' % function[6:]
-
+
# Collect arguments for the new code object. CodeType only
# accepts positional arguments, and arguments were inserted in
# new Python versions.
code_args = []
-
+
for attr in (
"argcount",
"posonlyargcount", # Python 3.8
@@ -142,88 +142,88 @@ def fake_traceback(exc_value, tb, filename, lineno):
# Replace with given value.
code_args.append(attr[1])
continue
-
+
try:
# Copy original value if it exists.
code_args.append(getattr(code, "co_" + attr))
except AttributeError:
# Some arguments were added later.
continue
-
+
code = CodeType(*code_args)
except Exception:
# Some environments such as Google App Engine don't support
# modifying code objects.
pass
-
+
# Execute the new code, which is guaranteed to raise, and return
# the new traceback without this frame.
try:
exec(code, globals, locals)
except BaseException:
return sys.exc_info()[2].tb_next
-
-
+
+
def get_template_locals(real_locals):
"""Based on the runtime locals, get the context that would be
available at that point in the template.
"""
# Start with the current template context.
ctx = real_locals.get("context")
-
- if ctx:
+
+ if ctx:
data = ctx.get_all().copy()
- else:
+ else:
data = {}
-
+
# Might be in a derived context that only sets local variables
# rather than pushing a context. Local variables follow the scheme
# l_depth_name. Find the highest-depth local that has a value for
# each name.
- local_overrides = {}
-
+ local_overrides = {}
+
for name, value in real_locals.items():
if not name.startswith("l_") or value is missing:
# Not a template variable, or no longer relevant.
- continue
+ continue
- try:
+ try:
_, depth, name = name.split("_", 2)
- depth = int(depth)
- except ValueError:
- continue
-
- cur_depth = local_overrides.get(name, (-1,))[0]
+ depth = int(depth)
+ except ValueError:
+ continue
- if cur_depth < depth:
- local_overrides[name] = (depth, value)
+ cur_depth = local_overrides.get(name, (-1,))[0]
+ if cur_depth < depth:
+ local_overrides[name] = (depth, value)
+
# Modify the context with any derived context.
for name, (_, value) in local_overrides.items():
- if value is missing:
+ if value is missing:
data.pop(name, None)
- else:
+ else:
data[name] = value
-
+
return data
-
-
+
+
if sys.version_info >= (3, 7):
# tb_next is directly assignable as of Python 3.7
def tb_set_next(tb, tb_next):
tb.tb_next = tb_next
return tb
-
-
+
+
elif PYPY:
# PyPy might have special support, and won't work with ctypes.
- try:
+ try:
import tputil
except ImportError:
# Without tproxy support, use the original traceback.
def tb_set_next(tb, tb_next):
return tb
-
+
else:
# With tproxy support, create a proxy around the traceback that
# returns the new tb_next.
@@ -231,38 +231,38 @@ elif PYPY:
def controller(op):
if op.opname == "__getattribute__" and op.args[0] == "tb_next":
return tb_next
-
+
return op.delegate()
-
+
return tputil.make_proxy(controller, obj=tb)
-
-
+
+
else:
# Use ctypes to assign tb_next at the C level since it's read-only
# from Python.
- import ctypes
-
+ import ctypes
+
class _CTraceback(ctypes.Structure):
_fields_ = [
# Extra PyObject slots when compiled with Py_TRACE_REFS.
("PyObject_HEAD", ctypes.c_byte * object().__sizeof__()),
# Only care about tb_next as an object, not a traceback.
("tb_next", ctypes.py_object),
- ]
-
+ ]
+
def tb_set_next(tb, tb_next):
c_tb = _CTraceback.from_address(id(tb))
-
+
# Clear out the old tb_next.
- if tb.tb_next is not None:
+ if tb.tb_next is not None:
c_tb_next = ctypes.py_object(tb.tb_next)
c_tb.tb_next = ctypes.py_object()
ctypes.pythonapi.Py_DecRef(c_tb_next)
-
+
# Assign the new tb_next.
if tb_next is not None:
c_tb_next = ctypes.py_object(tb_next)
ctypes.pythonapi.Py_IncRef(c_tb_next)
c_tb.tb_next = c_tb_next
-
+
return tb
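A minimal sketch of ``get_template_locals`` from the hunk above, with illustrative frame locals: names of the form ``l_<depth>_<name>`` collapse to the value from the deepest scope, and everything else is ignored.

# Sketch: map compiled-frame locals back to template variables.
from jinja2.debug import get_template_locals

frame_locals = {"l_0_user": "outer", "l_1_user": "inner", "unrelated": 42}
print(get_template_locals(frame_locals))  # {'user': 'inner'}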
diff --git a/contrib/python/Jinja2/py2/jinja2/defaults.py b/contrib/python/Jinja2/py2/jinja2/defaults.py
index 8e0e7d7710..31154b6b0e 100644
--- a/contrib/python/Jinja2/py2/jinja2/defaults.py
+++ b/contrib/python/Jinja2/py2/jinja2/defaults.py
@@ -1,4 +1,4 @@
-# -*- coding: utf-8 -*-
+# -*- coding: utf-8 -*-
from ._compat import range_type
from .filters import FILTERS as DEFAULT_FILTERS # noqa: F401
from .tests import TESTS as DEFAULT_TESTS # noqa: F401
@@ -6,34 +6,34 @@ from .utils import Cycler
from .utils import generate_lorem_ipsum
from .utils import Joiner
from .utils import Namespace
-
-# defaults for the parser / lexer
+
+# defaults for the parser / lexer
BLOCK_START_STRING = "{%"
BLOCK_END_STRING = "%}"
VARIABLE_START_STRING = "{{"
VARIABLE_END_STRING = "}}"
COMMENT_START_STRING = "{#"
COMMENT_END_STRING = "#}"
-LINE_STATEMENT_PREFIX = None
-LINE_COMMENT_PREFIX = None
-TRIM_BLOCKS = False
-LSTRIP_BLOCKS = False
+LINE_STATEMENT_PREFIX = None
+LINE_COMMENT_PREFIX = None
+TRIM_BLOCKS = False
+LSTRIP_BLOCKS = False
NEWLINE_SEQUENCE = "\n"
-KEEP_TRAILING_NEWLINE = False
-
+KEEP_TRAILING_NEWLINE = False
+
# default filters, tests and namespace
-
-DEFAULT_NAMESPACE = {
+
+DEFAULT_NAMESPACE = {
"range": range_type,
"dict": dict,
"lipsum": generate_lorem_ipsum,
"cycler": Cycler,
"joiner": Joiner,
"namespace": Namespace,
-}
-
-# default policies
-DEFAULT_POLICIES = {
+}
+
+# default policies
+DEFAULT_POLICIES = {
"compiler.ascii_str": True,
"urlize.rel": "noopener",
"urlize.target": None,
@@ -41,4 +41,4 @@ DEFAULT_POLICIES = {
"json.dumps_function": None,
"json.dumps_kwargs": {"sort_keys": True},
"ext.i18n.trimmed": False,
-}
+}
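A minimal sketch of how these defaults are consumed: ``DEFAULT_POLICIES`` is copied into each ``Environment`` as ``env.policies``, so individual policies can be overridden per instance (the values below are illustrative).

# Sketch: override copied policies on a single environment.
from jinja2 import Environment

env = Environment()
env.policies["urlize.rel"] = "nofollow noopener"
env.policies["json.dumps_kwargs"] = {"sort_keys": True, "indent": 2}
print(env.from_string("{{ data | tojson }}").render(data={"b": 1, "a": 2}))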
diff --git a/contrib/python/Jinja2/py2/jinja2/environment.py b/contrib/python/Jinja2/py2/jinja2/environment.py
index 8430390eea..0fb5d37933 100644
--- a/contrib/python/Jinja2/py2/jinja2/environment.py
+++ b/contrib/python/Jinja2/py2/jinja2/environment.py
@@ -1,15 +1,15 @@
-# -*- coding: utf-8 -*-
+# -*- coding: utf-8 -*-
"""Classes for managing templates and their runtime and compile time
options.
-"""
-import os
-import sys
-import weakref
+"""
+import os
+import sys
+import weakref
from functools import partial
from functools import reduce
-
+
from markupsafe import Markup
-
+
from . import nodes
from ._compat import encode_filename
from ._compat import implements_iterator
@@ -58,70 +58,70 @@ from .utils import internalcode
from .utils import LRUCache
from .utils import missing
-# for direct template usage we have up to ten living environments
-_spontaneous_environments = LRUCache(10)
-
-
+# for direct template usage we have up to ten living environments
+_spontaneous_environments = LRUCache(10)
+
+
def get_spontaneous_environment(cls, *args):
"""Return a new spontaneous environment. A spontaneous environment
is used for templates created directly rather than through an
existing environment.
-
+
:param cls: Environment class to create.
:param args: Positional arguments passed to environment.
- """
+ """
key = (cls, args)
- try:
+ try:
return _spontaneous_environments[key]
except KeyError:
_spontaneous_environments[key] = env = cls(*args)
env.shared = True
- return env
-
-
-def create_cache(size):
- """Return the cache class for the given size."""
- if size == 0:
- return None
- if size < 0:
- return {}
- return LRUCache(size)
-
-
-def copy_cache(cache):
- """Create an empty copy of the given cache."""
- if cache is None:
- return None
- elif type(cache) is dict:
- return {}
- return LRUCache(cache.capacity)
-
-
-def load_extensions(environment, extensions):
- """Load the extensions from the list and bind it to the environment.
- Returns a dict of instantiated environments.
- """
- result = {}
- for extension in extensions:
- if isinstance(extension, string_types):
- extension = import_string(extension)
- result[extension.identifier] = extension(environment)
- return result
-
-
-def fail_for_missing_callable(string, name):
- msg = string % name
- if isinstance(name, Undefined):
- try:
- name._fail_with_undefined_error()
- except Exception as e:
+ return env
+
+
+def create_cache(size):
+ """Return the cache class for the given size."""
+ if size == 0:
+ return None
+ if size < 0:
+ return {}
+ return LRUCache(size)
+
+
+def copy_cache(cache):
+ """Create an empty copy of the given cache."""
+ if cache is None:
+ return None
+ elif type(cache) is dict:
+ return {}
+ return LRUCache(cache.capacity)
+
+
+def load_extensions(environment, extensions):
+ """Load the extensions from the list and bind it to the environment.
+ Returns a dict of instantiated environments.
+ """
+ result = {}
+ for extension in extensions:
+ if isinstance(extension, string_types):
+ extension = import_string(extension)
+ result[extension.identifier] = extension(environment)
+ return result
+
+
+def fail_for_missing_callable(string, name):
+ msg = string % name
+ if isinstance(name, Undefined):
+ try:
+ name._fail_with_undefined_error()
+ except Exception as e:
msg = "%s (%s; did you forget to quote the callable name?)" % (msg, e)
- raise TemplateRuntimeError(msg)
-
-
-def _environment_sanity_check(environment):
- """Perform a sanity check on the environment."""
+ raise TemplateRuntimeError(msg)
+
+
+def _environment_sanity_check(environment):
+ """Perform a sanity check on the environment."""
assert issubclass(
environment.undefined, Undefined
), "undefined must be a subclass of undefined because filters depend on it."
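A minimal sketch of the ``cache_size`` semantics implemented by ``create_cache`` above: ``0`` disables the template cache, a negative size caches without eviction, and a positive size uses an LRU cache of that capacity.

# Sketch: the three cache_size modes handled by create_cache().
from jinja2 import Environment

no_cache = Environment(cache_size=0)    # .cache is None
unbounded = Environment(cache_size=-1)  # .cache is a plain dict
default = Environment()                 # .cache is an LRUCache(400)
print(type(no_cache.cache), type(unbounded.cache), type(default.cache))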
@@ -135,156 +135,156 @@ def _environment_sanity_check(environment):
"\r\n",
"\n",
), "newline_sequence set to unknown line ending string."
- return environment
-
-
-class Environment(object):
- r"""The core component of Jinja is the `Environment`. It contains
- important shared variables like configuration, filters, tests,
- globals and others. Instances of this class may be modified if
- they are not shared and if no template was loaded so far.
- Modifications on environments after the first template was loaded
- will lead to surprising effects and undefined behavior.
-
- Here are the possible initialization parameters:
-
- `block_start_string`
- The string marking the beginning of a block. Defaults to ``'{%'``.
-
- `block_end_string`
- The string marking the end of a block. Defaults to ``'%}'``.
-
- `variable_start_string`
- The string marking the beginning of a print statement.
- Defaults to ``'{{'``.
-
- `variable_end_string`
- The string marking the end of a print statement. Defaults to
- ``'}}'``.
-
- `comment_start_string`
- The string marking the beginning of a comment. Defaults to ``'{#'``.
-
- `comment_end_string`
- The string marking the end of a comment. Defaults to ``'#}'``.
-
- `line_statement_prefix`
- If given and a string, this will be used as prefix for line based
- statements. See also :ref:`line-statements`.
-
- `line_comment_prefix`
- If given and a string, this will be used as prefix for line based
- comments. See also :ref:`line-statements`.
-
- .. versionadded:: 2.2
-
- `trim_blocks`
- If this is set to ``True`` the first newline after a block is
- removed (block, not variable tag!). Defaults to `False`.
-
- `lstrip_blocks`
- If this is set to ``True`` leading spaces and tabs are stripped
- from the start of a line to a block. Defaults to `False`.
-
- `newline_sequence`
- The sequence that starts a newline. Must be one of ``'\r'``,
- ``'\n'`` or ``'\r\n'``. The default is ``'\n'`` which is a
- useful default for Linux and OS X systems as well as web
- applications.
-
- `keep_trailing_newline`
- Preserve the trailing newline when rendering templates.
- The default is ``False``, which causes a single newline,
- if present, to be stripped from the end of the template.
-
- .. versionadded:: 2.7
-
- `extensions`
- List of Jinja extensions to use. This can either be import paths
- as strings or extension classes. For more information have a
- look at :ref:`the extensions documentation <jinja-extensions>`.
-
- `optimized`
- should the optimizer be enabled? Default is ``True``.
-
- `undefined`
- :class:`Undefined` or a subclass of it that is used to represent
- undefined values in the template.
-
- `finalize`
- A callable that can be used to process the result of a variable
- expression before it is output. For example one can convert
- ``None`` implicitly into an empty string here.
-
- `autoescape`
- If set to ``True`` the XML/HTML autoescaping feature is enabled by
- default. For more details about autoescaping see
+ return environment
+
+
+class Environment(object):
+ r"""The core component of Jinja is the `Environment`. It contains
+ important shared variables like configuration, filters, tests,
+ globals and others. Instances of this class may be modified if
+ they are not shared and if no template was loaded so far.
+ Modifications on environments after the first template was loaded
+ will lead to surprising effects and undefined behavior.
+
+ Here are the possible initialization parameters:
+
+ `block_start_string`
+ The string marking the beginning of a block. Defaults to ``'{%'``.
+
+ `block_end_string`
+ The string marking the end of a block. Defaults to ``'%}'``.
+
+ `variable_start_string`
+ The string marking the beginning of a print statement.
+ Defaults to ``'{{'``.
+
+ `variable_end_string`
+ The string marking the end of a print statement. Defaults to
+ ``'}}'``.
+
+ `comment_start_string`
+ The string marking the beginning of a comment. Defaults to ``'{#'``.
+
+ `comment_end_string`
+ The string marking the end of a comment. Defaults to ``'#}'``.
+
+ `line_statement_prefix`
+ If given and a string, this will be used as prefix for line based
+ statements. See also :ref:`line-statements`.
+
+ `line_comment_prefix`
+ If given and a string, this will be used as prefix for line based
+ comments. See also :ref:`line-statements`.
+
+ .. versionadded:: 2.2
+
+ `trim_blocks`
+ If this is set to ``True`` the first newline after a block is
+ removed (block, not variable tag!). Defaults to `False`.
+
+ `lstrip_blocks`
+ If this is set to ``True`` leading spaces and tabs are stripped
+ from the start of a line to a block. Defaults to `False`.
+
+ `newline_sequence`
+ The sequence that starts a newline. Must be one of ``'\r'``,
+ ``'\n'`` or ``'\r\n'``. The default is ``'\n'`` which is a
+ useful default for Linux and OS X systems as well as web
+ applications.
+
+ `keep_trailing_newline`
+ Preserve the trailing newline when rendering templates.
+ The default is ``False``, which causes a single newline,
+ if present, to be stripped from the end of the template.
+
+ .. versionadded:: 2.7
+
+ `extensions`
+ List of Jinja extensions to use. This can either be import paths
+ as strings or extension classes. For more information have a
+ look at :ref:`the extensions documentation <jinja-extensions>`.
+
+ `optimized`
+ should the optimizer be enabled? Default is ``True``.
+
+ `undefined`
+ :class:`Undefined` or a subclass of it that is used to represent
+ undefined values in the template.
+
+ `finalize`
+ A callable that can be used to process the result of a variable
+ expression before it is output. For example one can convert
+ ``None`` implicitly into an empty string here.
+
+ `autoescape`
+ If set to ``True`` the XML/HTML autoescaping feature is enabled by
+ default. For more details about autoescaping see
:class:`~markupsafe.Markup`. As of Jinja 2.4 this can also
- be a callable that is passed the template name and has to
- return ``True`` or ``False`` depending on whether autoescape should be
- enabled by default.
-
- .. versionchanged:: 2.4
- `autoescape` can now be a function
-
- `loader`
- The template loader for this environment.
-
- `cache_size`
- The size of the cache. Per default this is ``400`` which means
- that if more than 400 templates are loaded the loader will clean
- out the least recently used template. If the cache size is set to
- ``0`` templates are recompiled all the time, if the cache size is
- ``-1`` the cache will not be cleaned.
-
- .. versionchanged:: 2.8
- The cache size was increased to 400 from a low 50.
-
- `auto_reload`
- Some loaders load templates from locations where the template
- sources may change (ie: file system or database). If
- ``auto_reload`` is set to ``True`` (default) every time a template is
- requested the loader checks if the source changed and if yes, it
- will reload the template. For higher performance it's possible to
- disable that.
-
- `bytecode_cache`
- If set to a bytecode cache object, this object will provide a
- cache for the internal Jinja bytecode so that templates don't
- have to be parsed if they were not changed.
-
- See :ref:`bytecode-cache` for more information.
-
- `enable_async`
- If set to true this enables async template execution which allows
- you to take advantage of newer Python features. This requires
- Python 3.6 or later.
- """
-
- #: if this environment is sandboxed. Modifying this variable won't make
- #: the environment sandboxed though. For a real sandboxed environment
- #: have a look at jinja2.sandbox. This flag alone controls the code
- #: generation by the compiler.
- sandboxed = False
-
- #: True if the environment is just an overlay
- overlayed = False
-
- #: the environment this environment is linked to if it is an overlay
- linked_to = None
-
- #: shared environments have this set to `True`. A shared environment
- #: must not be modified
- shared = False
-
- #: the class that is used for code generation. See
- #: :class:`~jinja2.compiler.CodeGenerator` for more information.
- code_generator_class = CodeGenerator
-
- #: the context class that is used for templates. See
- #: :class:`~jinja2.runtime.Context` for more information.
- context_class = Context
-
+ be a callable that is passed the template name and has to
+ return ``True`` or ``False`` depending on whether autoescape should be
+ enabled by default.
+
+ .. versionchanged:: 2.4
+ `autoescape` can now be a function
+
+ `loader`
+ The template loader for this environment.
+
+ `cache_size`
+ The size of the cache. Per default this is ``400`` which means
+ that if more than 400 templates are loaded the loader will clean
+ out the least recently used template. If the cache size is set to
+ ``0`` templates are recompiled all the time, if the cache size is
+ ``-1`` the cache will not be cleaned.
+
+ .. versionchanged:: 2.8
+ The cache size was increased to 400 from a low 50.
+
+ `auto_reload`
+ Some loaders load templates from locations where the template
+ sources may change (ie: file system or database). If
+ ``auto_reload`` is set to ``True`` (default) every time a template is
+ requested the loader checks if the source changed and if yes, it
+ will reload the template. For higher performance it's possible to
+ disable that.
+
+ `bytecode_cache`
+ If set to a bytecode cache object, this object will provide a
+ cache for the internal Jinja bytecode so that templates don't
+ have to be parsed if they were not changed.
+
+ See :ref:`bytecode-cache` for more information.
+
+ `enable_async`
+ If set to true this enables async template execution which allows
+ you to take advantage of newer Python features. This requires
+ Python 3.6 or later.
+ """
+
+ #: if this environment is sandboxed. Modifying this variable won't make
+ #: the environment sandboxed though. For a real sandboxed environment
+ #: have a look at jinja2.sandbox. This flag alone controls the code
+ #: generation by the compiler.
+ sandboxed = False
+
+ #: True if the environment is just an overlay
+ overlayed = False
+
+ #: the environment this environment is linked to if it is an overlay
+ linked_to = None
+
+ #: shared environments have this set to `True`. A shared environment
+ #: must not be modified
+ shared = False
+
+ #: the class that is used for code generation. See
+ #: :class:`~jinja2.compiler.CodeGenerator` for more information.
+ code_generator_class = CodeGenerator
+
+ #: the context class that is used for templates. See
+ #: :class:`~jinja2.runtime.Context` for more information.
+ context_class = Context
+
def __init__(
self,
block_start_string=BLOCK_START_STRING,
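A minimal construction sketch for the parameters documented in the docstring above; the custom delimiters and the ``select_autoescape`` call are illustrative choices, not defaults.

# Sketch: a customized Environment using the documented constructor options.
from jinja2 import Environment, select_autoescape

env = Environment(
    block_start_string="<%",
    block_end_string="%>",
    variable_start_string="${",
    variable_end_string="}",
    trim_blocks=True,
    lstrip_blocks=True,
    autoescape=select_autoescape(["html", "xml"]),
)
print(env.from_string("Hello ${ name }!").render(name="<World>"))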
@@ -310,78 +310,78 @@ class Environment(object):
bytecode_cache=None,
enable_async=False,
):
- # !!Important notice!!
- # The constructor accepts quite a few arguments that should be
- # passed by keyword rather than position. However it's important to
- # not change the order of arguments because it's used at least
- # internally in those cases:
- # - spontaneous environments (i18n extension and Template)
- # - unittests
- # If parameter changes are required only add parameters at the end
- # and don't change the arguments (or the defaults!) of the arguments
- # existing already.
-
- # lexer / parser information
- self.block_start_string = block_start_string
- self.block_end_string = block_end_string
- self.variable_start_string = variable_start_string
- self.variable_end_string = variable_end_string
- self.comment_start_string = comment_start_string
- self.comment_end_string = comment_end_string
- self.line_statement_prefix = line_statement_prefix
- self.line_comment_prefix = line_comment_prefix
- self.trim_blocks = trim_blocks
- self.lstrip_blocks = lstrip_blocks
- self.newline_sequence = newline_sequence
- self.keep_trailing_newline = keep_trailing_newline
-
- # runtime information
- self.undefined = undefined
- self.optimized = optimized
- self.finalize = finalize
- self.autoescape = autoescape
-
- # defaults
- self.filters = DEFAULT_FILTERS.copy()
- self.tests = DEFAULT_TESTS.copy()
- self.globals = DEFAULT_NAMESPACE.copy()
-
- # set the loader provided
- self.loader = loader
- self.cache = create_cache(cache_size)
- self.bytecode_cache = bytecode_cache
- self.auto_reload = auto_reload
-
- # configurable policies
- self.policies = DEFAULT_POLICIES.copy()
-
- # load extensions
- self.extensions = load_extensions(self, extensions)
-
- self.enable_async = enable_async
- self.is_async = self.enable_async and have_async_gen
+ # !!Important notice!!
+ # The constructor accepts quite a few arguments that should be
+ # passed by keyword rather than position. However it's important to
+ # not change the order of arguments because it's used at least
+ # internally in those cases:
+ # - spontaneous environments (i18n extension and Template)
+ # - unittests
+ # If parameter changes are required only add parameters at the end
+ # and don't change the arguments (or the defaults!) of the arguments
+ # existing already.
+
+ # lexer / parser information
+ self.block_start_string = block_start_string
+ self.block_end_string = block_end_string
+ self.variable_start_string = variable_start_string
+ self.variable_end_string = variable_end_string
+ self.comment_start_string = comment_start_string
+ self.comment_end_string = comment_end_string
+ self.line_statement_prefix = line_statement_prefix
+ self.line_comment_prefix = line_comment_prefix
+ self.trim_blocks = trim_blocks
+ self.lstrip_blocks = lstrip_blocks
+ self.newline_sequence = newline_sequence
+ self.keep_trailing_newline = keep_trailing_newline
+
+ # runtime information
+ self.undefined = undefined
+ self.optimized = optimized
+ self.finalize = finalize
+ self.autoescape = autoescape
+
+ # defaults
+ self.filters = DEFAULT_FILTERS.copy()
+ self.tests = DEFAULT_TESTS.copy()
+ self.globals = DEFAULT_NAMESPACE.copy()
+
+ # set the loader provided
+ self.loader = loader
+ self.cache = create_cache(cache_size)
+ self.bytecode_cache = bytecode_cache
+ self.auto_reload = auto_reload
+
+ # configurable policies
+ self.policies = DEFAULT_POLICIES.copy()
+
+ # load extensions
+ self.extensions = load_extensions(self, extensions)
+
+ self.enable_async = enable_async
+ self.is_async = self.enable_async and have_async_gen
if self.is_async:
# runs patch_all() to enable async support
from . import asyncsupport # noqa: F401
-
- _environment_sanity_check(self)
-
- def add_extension(self, extension):
- """Adds an extension after the environment was created.
-
- .. versionadded:: 2.5
- """
- self.extensions.update(load_extensions(self, [extension]))
-
- def extend(self, **attributes):
- """Add the items to the instance of the environment if they do not exist
- yet. This is used by :ref:`extensions <writing-extensions>` to register
- callbacks and configuration values without breaking inheritance.
- """
- for key, value in iteritems(attributes):
- if not hasattr(self, key):
- setattr(self, key, value)
-
+
+ _environment_sanity_check(self)
+
+ def add_extension(self, extension):
+ """Adds an extension after the environment was created.
+
+ .. versionadded:: 2.5
+ """
+ self.extensions.update(load_extensions(self, [extension]))
+
+ def extend(self, **attributes):
+ """Add the items to the instance of the environment if they do not exist
+ yet. This is used by :ref:`extensions <writing-extensions>` to register
+ callbacks and configuration values without breaking inheritance.
+ """
+ for key, value in iteritems(attributes):
+ if not hasattr(self, key):
+ setattr(self, key, value)
+
def overlay(
self,
block_start_string=missing,
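A minimal sketch of ``add_extension`` and ``extend`` from the hunk above; ``jinja2.ext.do`` is a bundled extension, while ``my_flag`` is a hypothetical attribute used only for illustration.

# Sketch: register an extension after construction and add a new attribute.
from jinja2 import Environment

env = Environment()
env.add_extension("jinja2.ext.do")  # import-path form, resolved by load_extensions()
env.extend(my_flag=True)            # hypothetical attribute, set only if absent
print(sorted(env.extensions), env.my_flag)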
@@ -404,185 +404,185 @@ class Environment(object):
auto_reload=missing,
bytecode_cache=missing,
):
- """Create a new overlay environment that shares all the data with the
- current environment except for cache and the overridden attributes.
- Extensions cannot be removed for an overlayed environment. An overlayed
- environment automatically gets all the extensions of the environment it
- is linked to plus optional extra extensions.
-
- Creating overlays should happen after the initial environment was set
- up completely. Not all attributes are truly linked, some are just
- copied over so modifications on the original environment may not shine
- through.
- """
- args = dict(locals())
+ """Create a new overlay environment that shares all the data with the
+ current environment except for cache and the overridden attributes.
+ Extensions cannot be removed for an overlayed environment. An overlayed
+ environment automatically gets all the extensions of the environment it
+ is linked to plus optional extra extensions.
+
+ Creating overlays should happen after the initial environment was set
+ up completely. Not all attributes are truly linked, some are just
+ copied over so modifications on the original environment may not shine
+ through.
+ """
+ args = dict(locals())
del args["self"], args["cache_size"], args["extensions"]
-
- rv = object.__new__(self.__class__)
- rv.__dict__.update(self.__dict__)
- rv.overlayed = True
- rv.linked_to = self
-
- for key, value in iteritems(args):
- if value is not missing:
- setattr(rv, key, value)
-
- if cache_size is not missing:
- rv.cache = create_cache(cache_size)
- else:
- rv.cache = copy_cache(self.cache)
-
- rv.extensions = {}
- for key, value in iteritems(self.extensions):
- rv.extensions[key] = value.bind(rv)
- if extensions is not missing:
- rv.extensions.update(load_extensions(rv, extensions))
-
- return _environment_sanity_check(rv)
-
- lexer = property(get_lexer, doc="The lexer for this environment.")
-
- def iter_extensions(self):
- """Iterates over the extensions by priority."""
+
+ rv = object.__new__(self.__class__)
+ rv.__dict__.update(self.__dict__)
+ rv.overlayed = True
+ rv.linked_to = self
+
+ for key, value in iteritems(args):
+ if value is not missing:
+ setattr(rv, key, value)
+
+ if cache_size is not missing:
+ rv.cache = create_cache(cache_size)
+ else:
+ rv.cache = copy_cache(self.cache)
+
+ rv.extensions = {}
+ for key, value in iteritems(self.extensions):
+ rv.extensions[key] = value.bind(rv)
+ if extensions is not missing:
+ rv.extensions.update(load_extensions(rv, extensions))
+
+ return _environment_sanity_check(rv)
+
+ lexer = property(get_lexer, doc="The lexer for this environment.")
+
+ def iter_extensions(self):
+ """Iterates over the extensions by priority."""
return iter(sorted(self.extensions.values(), key=lambda x: x.priority))
-
- def getitem(self, obj, argument):
- """Get an item or attribute of an object but prefer the item."""
- try:
- return obj[argument]
- except (AttributeError, TypeError, LookupError):
- if isinstance(argument, string_types):
- try:
- attr = str(argument)
- except Exception:
- pass
- else:
- try:
- return getattr(obj, attr)
- except AttributeError:
- pass
- return self.undefined(obj=obj, name=argument)
-
- def getattr(self, obj, attribute):
- """Get an item or attribute of an object but prefer the attribute.
- Unlike :meth:`getitem` the attribute *must* be a bytestring.
- """
- try:
- return getattr(obj, attribute)
- except AttributeError:
- pass
- try:
- return obj[attribute]
- except (TypeError, LookupError, AttributeError):
- return self.undefined(obj=obj, name=attribute)
-
+
+ def getitem(self, obj, argument):
+ """Get an item or attribute of an object but prefer the item."""
+ try:
+ return obj[argument]
+ except (AttributeError, TypeError, LookupError):
+ if isinstance(argument, string_types):
+ try:
+ attr = str(argument)
+ except Exception:
+ pass
+ else:
+ try:
+ return getattr(obj, attr)
+ except AttributeError:
+ pass
+ return self.undefined(obj=obj, name=argument)
+
+ def getattr(self, obj, attribute):
+ """Get an item or attribute of an object but prefer the attribute.
+ Unlike :meth:`getitem` the attribute *must* be a bytestring.
+ """
+ try:
+ return getattr(obj, attribute)
+ except AttributeError:
+ pass
+ try:
+ return obj[attribute]
+ except (TypeError, LookupError, AttributeError):
+ return self.undefined(obj=obj, name=attribute)
+
def call_filter(
self, name, value, args=None, kwargs=None, context=None, eval_ctx=None
):
- """Invokes a filter on a value the same way the compiler does it.
-
- Note that on Python 3 this might return a coroutine in case the
- filter is running from an environment in async mode and the filter
- supports async execution. It's your responsibility to await this
- if needed.
-
- .. versionadded:: 2.7
- """
- func = self.filters.get(name)
- if func is None:
+ """Invokes a filter on a value the same way the compiler does it.
+
+ Note that on Python 3 this might return a coroutine in case the
+ filter is running from an environment in async mode and the filter
+ supports async execution. It's your responsibility to await this
+ if needed.
+
+ .. versionadded:: 2.7
+ """
+ func = self.filters.get(name)
+ if func is None:
fail_for_missing_callable("no filter named %r", name)
- args = [value] + list(args or ())
+ args = [value] + list(args or ())
if getattr(func, "contextfilter", False) is True:
- if context is None:
+ if context is None:
raise TemplateRuntimeError(
"Attempted to invoke context filter without context"
)
- args.insert(0, context)
+ args.insert(0, context)
elif getattr(func, "evalcontextfilter", False) is True:
- if eval_ctx is None:
- if context is not None:
- eval_ctx = context.eval_ctx
- else:
- eval_ctx = EvalContext(self)
- args.insert(0, eval_ctx)
+ if eval_ctx is None:
+ if context is not None:
+ eval_ctx = context.eval_ctx
+ else:
+ eval_ctx = EvalContext(self)
+ args.insert(0, eval_ctx)
elif getattr(func, "environmentfilter", False) is True:
- args.insert(0, self)
- return func(*args, **(kwargs or {}))
-
- def call_test(self, name, value, args=None, kwargs=None):
- """Invokes a test on a value the same way the compiler does it.
-
- .. versionadded:: 2.7
- """
- func = self.tests.get(name)
- if func is None:
+ args.insert(0, self)
+ return func(*args, **(kwargs or {}))
+
+ def call_test(self, name, value, args=None, kwargs=None):
+ """Invokes a test on a value the same way the compiler does it.
+
+ .. versionadded:: 2.7
+ """
+ func = self.tests.get(name)
+ if func is None:
fail_for_missing_callable("no test named %r", name)
- return func(value, *(args or ()), **(kwargs or {}))
-
- @internalcode
- def parse(self, source, name=None, filename=None):
- """Parse the sourcecode and return the abstract syntax tree. This
- tree of nodes is used by the compiler to convert the template into
- executable source- or bytecode. This is useful for debugging or to
- extract information from templates.
-
+ return func(value, *(args or ()), **(kwargs or {}))
+
+ @internalcode
+ def parse(self, source, name=None, filename=None):
+ """Parse the sourcecode and return the abstract syntax tree. This
+ tree of nodes is used by the compiler to convert the template into
+ executable source- or bytecode. This is useful for debugging or to
+ extract information from templates.
+
If you are :ref:`developing Jinja extensions <writing-extensions>`
- this gives you a good overview of the node tree generated.
- """
- try:
- return self._parse(source, name, filename)
- except TemplateSyntaxError:
+ this gives you a good overview of the node tree generated.
+ """
+ try:
+ return self._parse(source, name, filename)
+ except TemplateSyntaxError:
self.handle_exception(source=source)
-
- def _parse(self, source, name, filename):
- """Internal parsing function used by `parse` and `compile`."""
- return Parser(self, source, name, encode_filename(filename)).parse()
-
- def lex(self, source, name=None, filename=None):
- """Lex the given sourcecode and return a generator that yields
- tokens as tuples in the form ``(lineno, token_type, value)``.
- This can be useful for :ref:`extension development <writing-extensions>`
- and debugging templates.
-
- This does not perform preprocessing. If you want the preprocessing
- of the extensions to be applied you have to filter source through
- the :meth:`preprocess` method.
- """
- source = text_type(source)
- try:
- return self.lexer.tokeniter(source, name, filename)
- except TemplateSyntaxError:
+
+ def _parse(self, source, name, filename):
+ """Internal parsing function used by `parse` and `compile`."""
+ return Parser(self, source, name, encode_filename(filename)).parse()
+
+ def lex(self, source, name=None, filename=None):
+ """Lex the given sourcecode and return a generator that yields
+ tokens as tuples in the form ``(lineno, token_type, value)``.
+ This can be useful for :ref:`extension development <writing-extensions>`
+ and debugging templates.
+
+ This does not perform preprocessing. If you want the preprocessing
+ of the extensions to be applied you have to filter source through
+ the :meth:`preprocess` method.
+ """
+ source = text_type(source)
+ try:
+ return self.lexer.tokeniter(source, name, filename)
+ except TemplateSyntaxError:
self.handle_exception(source=source)
-
- def preprocess(self, source, name=None, filename=None):
- """Preprocesses the source with all extensions. This is automatically
- called for all parsing and compiling methods but *not* for :meth:`lex`
- because there you usually only want the actual source tokenized.
- """
+
+ def preprocess(self, source, name=None, filename=None):
+ """Preprocesses the source with all extensions. This is automatically
+ called for all parsing and compiling methods but *not* for :meth:`lex`
+ because there you usually only want the actual source tokenized.
+ """
return reduce(
lambda s, e: e.preprocess(s, name, filename),
self.iter_extensions(),
text_type(source),
)
-
- def _tokenize(self, source, name, filename=None, state=None):
- """Called by the parser to do the preprocessing and filtering
- for all the extensions. Returns a :class:`~jinja2.lexer.TokenStream`.
- """
- source = self.preprocess(source, name, filename)
- stream = self.lexer.tokenize(source, name, filename, state)
- for ext in self.iter_extensions():
- stream = ext.filter_stream(stream)
- if not isinstance(stream, TokenStream):
- stream = TokenStream(stream, name, filename)
- return stream
-
- def _generate(self, source, name, filename, defer_init=False):
- """Internal hook that can be overridden to hook a different generate
- method in.
-
- .. versionadded:: 2.5
- """
+
+ def _tokenize(self, source, name, filename=None, state=None):
+ """Called by the parser to do the preprocessing and filtering
+ for all the extensions. Returns a :class:`~jinja2.lexer.TokenStream`.
+ """
+ source = self.preprocess(source, name, filename)
+ stream = self.lexer.tokenize(source, name, filename, state)
+ for ext in self.iter_extensions():
+ stream = ext.filter_stream(stream)
+ if not isinstance(stream, TokenStream):
+ stream = TokenStream(stream, name, filename)
+ return stream
+
+ def _generate(self, source, name, filename, defer_init=False):
+ """Internal hook that can be overridden to hook a different generate
+ method in.
+
+ .. versionadded:: 2.5
+ """
return generate(
source,
self,
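A minimal sketch of ``call_filter`` and ``call_test`` from the hunk above, which apply registered filters and tests the same way compiled template code does.

# Sketch: invoke filters/tests directly through the environment.
from jinja2 import Environment

env = Environment()
print(env.call_filter("upper", "jinja"))           # 'JINJA'
print(env.call_filter("join", ["a", "b"], ["-"]))  # extra positional args follow the value
print(env.call_test("even", 4))                    # True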
@@ -591,96 +591,96 @@ class Environment(object):
defer_init=defer_init,
optimized=self.optimized,
)
-
- def _compile(self, source, filename):
- """Internal hook that can be overridden to hook a different compile
- method in.
-
- .. versionadded:: 2.5
- """
+
+ def _compile(self, source, filename):
+ """Internal hook that can be overridden to hook a different compile
+ method in.
+
+ .. versionadded:: 2.5
+ """
return compile(source, filename, "exec")
-
- @internalcode
+
+ @internalcode
def compile(self, source, name=None, filename=None, raw=False, defer_init=False):
- """Compile a node or template source code. The `name` parameter is
- the load name of the template after it was joined using
- :meth:`join_path` if necessary, not the filename on the file system.
- The `filename` parameter is the estimated filename of the template on
- the file system. If the template came from a database or memory this
- can be omitted.
-
- The return value of this method is a python code object. If the `raw`
- parameter is `True` the return value will be a string with python
- code equivalent to the bytecode returned otherwise. This method is
- mainly used internally.
-
- `defer_init` is used internally to aid the module code generator. This
- causes the generated code to be able to import without the global
- environment variable to be set.
-
- .. versionadded:: 2.4
- `defer_init` parameter added.
- """
- source_hint = None
- try:
- if isinstance(source, string_types):
- source_hint = source
- source = self._parse(source, name, filename)
+ """Compile a node or template source code. The `name` parameter is
+ the load name of the template after it was joined using
+ :meth:`join_path` if necessary, not the filename on the file system.
+ The `filename` parameter is the estimated filename of the template on
+ the file system. If the template came from a database or memory this
+ can be omitted.
+
+ The return value of this method is a python code object. If the `raw`
+ parameter is `True` the return value will be a string with python
+ code equivalent to the bytecode returned otherwise. This method is
+ mainly used internally.
+
+ `defer_init` is used internally to aid the module code generator. This
+ causes the generated code to be able to import without the global
+ environment variable to be set.
+
+ .. versionadded:: 2.4
+ `defer_init` parameter added.
+ """
+ source_hint = None
+ try:
+ if isinstance(source, string_types):
+ source_hint = source
+ source = self._parse(source, name, filename)
source = self._generate(source, name, filename, defer_init=defer_init)
- if raw:
- return source
- if filename is None:
+ if raw:
+ return source
+ if filename is None:
filename = "<template>"
- else:
- filename = encode_filename(filename)
- return self._compile(source, filename)
- except TemplateSyntaxError:
+ else:
+ filename = encode_filename(filename)
+ return self._compile(source, filename)
+ except TemplateSyntaxError:
self.handle_exception(source=source_hint)
-
- def compile_expression(self, source, undefined_to_none=True):
- """A handy helper method that returns a callable that accepts keyword
- arguments that appear as variables in the expression. If called it
- returns the result of the expression.
-
- This is useful if applications want to use the same rules as Jinja
- in template "configuration files" or similar situations.
-
- Example usage:
-
- >>> env = Environment()
- >>> expr = env.compile_expression('foo == 42')
- >>> expr(foo=23)
- False
- >>> expr(foo=42)
- True
-
- Per default the return value is converted to `None` if the
- expression returns an undefined value. This can be changed
- by setting `undefined_to_none` to `False`.
-
- >>> env.compile_expression('var')() is None
- True
- >>> env.compile_expression('var', undefined_to_none=False)()
- Undefined
-
- .. versionadded:: 2.1
- """
+
+ def compile_expression(self, source, undefined_to_none=True):
+ """A handy helper method that returns a callable that accepts keyword
+ arguments that appear as variables in the expression. If called it
+ returns the result of the expression.
+
+ This is useful if applications want to use the same rules as Jinja
+ in template "configuration files" or similar situations.
+
+ Example usage:
+
+ >>> env = Environment()
+ >>> expr = env.compile_expression('foo == 42')
+ >>> expr(foo=23)
+ False
+ >>> expr(foo=42)
+ True
+
+ Per default the return value is converted to `None` if the
+ expression returns an undefined value. This can be changed
+ by setting `undefined_to_none` to `False`.
+
+ >>> env.compile_expression('var')() is None
+ True
+ >>> env.compile_expression('var', undefined_to_none=False)()
+ Undefined
+
+ .. versionadded:: 2.1
+ """
parser = Parser(self, source, state="variable")
- try:
- expr = parser.parse_expression()
- if not parser.stream.eos:
+ try:
+ expr = parser.parse_expression()
+ if not parser.stream.eos:
raise TemplateSyntaxError(
"chunk after expression", parser.stream.current.lineno, None, None
)
- expr.set_environment(self)
- except TemplateSyntaxError:
+ expr.set_environment(self)
+ except TemplateSyntaxError:
if sys.exc_info() is not None:
self.handle_exception(source=source)
body = [nodes.Assign(nodes.Name("result", "store"), expr, lineno=1)]
- template = self.from_string(nodes.Template(body, lineno=1))
- return TemplateExpression(template, undefined_to_none)
-
+ template = self.from_string(nodes.Template(body, lineno=1))
+ return TemplateExpression(template, undefined_to_none)
+
def compile_templates(
self,
target,
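A minimal sketch of ``compile`` with ``raw=True`` as described above, returning the generated Python source instead of a code object.

# Sketch: inspect the code generator's output for a tiny template.
from jinja2 import Environment

env = Environment()
print(env.compile("Hello {{ name }}!", name="hello.txt", raw=True))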
@@ -691,37 +691,37 @@ class Environment(object):
ignore_errors=True,
py_compile=False,
):
- """Finds all the templates the loader can find, compiles them
- and stores them in `target`. If `zip` is `None`, instead of in a
- zipfile, the templates will be stored in a directory.
- By default a deflate zip algorithm is used. To switch to
- the stored algorithm, `zip` can be set to ``'stored'``.
-
- `extensions` and `filter_func` are passed to :meth:`list_templates`.
- Each template returned will be compiled to the target folder or
- zipfile.
-
- By default template compilation errors are ignored. In case a
- log function is provided, errors are logged. If you want template
- syntax errors to abort the compilation you can set `ignore_errors`
- to `False` and you will get an exception on syntax errors.
-
- If `py_compile` is set to `True` .pyc files will be written to the
- target instead of standard .py files. This flag does not do anything
- on PyPy and Python 3, where pyc files are not picked up on their own and
- don't give much benefit.
-
- .. versionadded:: 2.4
- """
+ """Finds all the templates the loader can find, compiles them
+ and stores them in `target`. If `zip` is `None`, instead of in a
+ zipfile, the templates will be stored in a directory.
+ By default a deflate zip algorithm is used. To switch to
+ the stored algorithm, `zip` can be set to ``'stored'``.
+
+ `extensions` and `filter_func` are passed to :meth:`list_templates`.
+ Each template returned will be compiled to the target folder or
+ zipfile.
+
+ By default template compilation errors are ignored. In case a
+ log function is provided, errors are logged. If you want template
+ syntax errors to abort the compilation you can set `ignore_errors`
+ to `False` and you will get an exception on syntax errors.
+
+ If `py_compile` is set to `True` .pyc files will be written to the
+ target instead of standard .py files. This flag does not do anything
+ on PyPy and Python 3, where pyc files are not picked up on their own and
+ don't give much benefit.
+
+ .. versionadded:: 2.4
+ """
from .loaders import ModuleLoader
-
- if log_function is None:
-
+
+ if log_function is None:
+
def log_function(x):
pass
- if py_compile:
- if not PY2 or PYPY:
+ if py_compile:
+ if not PY2 or PYPY:
import warnings
warnings.warn(
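A minimal sketch of ``compile_templates`` paired with ``ModuleLoader``; the ``templates`` and ``compiled`` directory names are assumptions for illustration.

# Sketch: precompile templates into a folder, then load them without reparsing.
from jinja2 import Environment, FileSystemLoader, ModuleLoader

src = Environment(loader=FileSystemLoader("templates"))
src.compile_templates("compiled", zip=None)          # write .py modules to a directory

fast = Environment(loader=ModuleLoader("compiled"))  # serves the precompiled templates
# fast.get_template("index.html")                    # hypothetical template name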
@@ -730,87 +730,87 @@ class Environment(object):
DeprecationWarning,
stacklevel=2,
)
- py_compile = False
- else:
- import imp
- import marshal
-
+ py_compile = False
+ else:
+ import imp
+ import marshal
+
py_header = imp.get_magic() + u"\xff\xff\xff\xff".encode("iso-8859-15")
- # Python 3.3 added a source filesize to the header
- if sys.version_info >= (3, 3):
+ # Python 3.3 added a source filesize to the header
+ if sys.version_info >= (3, 3):
py_header += u"\x00\x00\x00\x00".encode("iso-8859-15")
-
+
def write_file(filename, data):
- if zip:
- info = ZipInfo(filename)
- info.external_attr = 0o755 << 16
- zip_file.writestr(info, data)
- else:
+ if zip:
+ info = ZipInfo(filename)
+ info.external_attr = 0o755 << 16
+ zip_file.writestr(info, data)
+ else:
if isinstance(data, text_type):
data = data.encode("utf8")
with open(os.path.join(target, filename), "wb") as f:
- f.write(data)
-
- if zip is not None:
- from zipfile import ZipFile, ZipInfo, ZIP_DEFLATED, ZIP_STORED
+ f.write(data)
+
+ if zip is not None:
+ from zipfile import ZipFile, ZipInfo, ZIP_DEFLATED, ZIP_STORED
zip_file = ZipFile(
target, "w", dict(deflated=ZIP_DEFLATED, stored=ZIP_STORED)[zip]
)
- log_function('Compiling into Zip archive "%s"' % target)
- else:
- if not os.path.isdir(target):
- os.makedirs(target)
- log_function('Compiling into folder "%s"' % target)
-
- try:
- for name in self.list_templates(extensions, filter_func):
- source, filename, _ = self.loader.get_source(self, name)
- try:
- code = self.compile(source, name, filename, True, True)
- except TemplateSyntaxError as e:
- if not ignore_errors:
- raise
- log_function('Could not compile "%s": %s' % (name, e))
- continue
-
- filename = ModuleLoader.get_module_filename(name)
-
- if py_compile:
- c = self._compile(code, encode_filename(filename))
+ log_function('Compiling into Zip archive "%s"' % target)
+ else:
+ if not os.path.isdir(target):
+ os.makedirs(target)
+ log_function('Compiling into folder "%s"' % target)
+
+ try:
+ for name in self.list_templates(extensions, filter_func):
+ source, filename, _ = self.loader.get_source(self, name)
+ try:
+ code = self.compile(source, name, filename, True, True)
+ except TemplateSyntaxError as e:
+ if not ignore_errors:
+ raise
+ log_function('Could not compile "%s": %s' % (name, e))
+ continue
+
+ filename = ModuleLoader.get_module_filename(name)
+
+ if py_compile:
+ c = self._compile(code, encode_filename(filename))
write_file(filename + "c", py_header + marshal.dumps(c))
log_function('Byte-compiled "%s" as %s' % (name, filename + "c"))
- else:
+ else:
write_file(filename, code)
- log_function('Compiled "%s" as %s' % (name, filename))
- finally:
- if zip:
- zip_file.close()
-
+ log_function('Compiled "%s" as %s' % (name, filename))
+ finally:
+ if zip:
+ zip_file.close()
+
log_function("Finished compiling templates")
-
- def list_templates(self, extensions=None, filter_func=None):
- """Returns a list of templates for this environment. This requires
- that the loader supports the loader's
- :meth:`~BaseLoader.list_templates` method.
-
- If there are other files in the template folder besides the
- actual templates, the returned list can be filtered. There are two
- ways: either `extensions` is set to a list of file extensions for
- templates, or a `filter_func` can be provided which is a callable that
- is passed a template name and should return `True` if it should end up
- in the result list.
-
- If the loader does not support that, a :exc:`TypeError` is raised.
-
- .. versionadded:: 2.4
- """
+
+ def list_templates(self, extensions=None, filter_func=None):
+ """Returns a list of templates for this environment. This requires
+ that the loader supports the loader's
+ :meth:`~BaseLoader.list_templates` method.
+
+ If there are other files in the template folder besides the
+ actual templates, the returned list can be filtered. There are two
+ ways: either `extensions` is set to a list of file extensions for
+ templates, or a `filter_func` can be provided which is a callable that
+ is passed a template name and should return `True` if it should end up
+ in the result list.
+
+ If the loader does not support that, a :exc:`TypeError` is raised.
+
+ .. versionadded:: 2.4
+ """
names = self.loader.list_templates()
- if extensions is not None:
- if filter_func is not None:
+ if extensions is not None:
+ if filter_func is not None:
raise TypeError(
"either extensions or filter_func can be passed, but not both"
)
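A minimal sketch of the ``list_templates`` filtering described above; the loader contents are illustrative, and either ``extensions`` or ``filter_func`` may be passed, not both.

# Sketch: filter listed templates by extension or by callable.
from jinja2 import DictLoader, Environment

env = Environment(loader=DictLoader({"a.html": "", "b.txt": "", "c.html": ""}))
print(env.list_templates(extensions=["html"]))             # ['a.html', 'c.html']
print(env.list_templates(filter_func=lambda n: "b" in n))  # ['b.txt']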
@@ -818,165 +818,165 @@ class Environment(object):
def filter_func(x):
return "." in x and x.rsplit(".", 1)[1] in extensions
- if filter_func is not None:
+ if filter_func is not None:
names = [name for name in names if filter_func(name)]
-
+
return names
def handle_exception(self, source=None):
- """Exception handling helper. This is used internally to either raise
- rewritten exceptions or return a rendered traceback for the template.
- """
+ """Exception handling helper. This is used internally to either raise
+ rewritten exceptions or return a rendered traceback for the template.
+ """
from .debug import rewrite_traceback_stack
-
+
reraise(*rewrite_traceback_stack(source=source))
-
- def join_path(self, template, parent):
- """Join a template with the parent. By default all the lookups are
- relative to the loader root so this method returns the `template`
- parameter unchanged, but if the paths should be relative to the
- parent template, this function can be used to calculate the real
- template name.
-
- Subclasses may override this method and implement template path
- joining here.
- """
- return template
-
- @internalcode
- def _load_template(self, name, globals):
- if self.loader is None:
+
+ def join_path(self, template, parent):
+ """Join a template with the parent. By default all the lookups are
+ relative to the loader root so this method returns the `template`
+ parameter unchanged, but if the paths should be relative to the
+ parent template, this function can be used to calculate the real
+ template name.
+
+ Subclasses may override this method and implement template path
+ joining here.
+ """
+ return template
+
+ @internalcode
+ def _load_template(self, name, globals):
+ if self.loader is None:
raise TypeError("no loader for this environment specified")
- cache_key = (weakref.ref(self.loader), name)
- if self.cache is not None:
- template = self.cache.get(cache_key)
+ cache_key = (weakref.ref(self.loader), name)
+ if self.cache is not None:
+ template = self.cache.get(cache_key)
if template is not None and (
not self.auto_reload or template.is_up_to_date
):
- return template
- template = self.loader.load(self, name, globals)
- if self.cache is not None:
- self.cache[cache_key] = template
- return template
-
- @internalcode
- def get_template(self, name, parent=None, globals=None):
- """Load a template from the loader. If a loader is configured this
- method asks the loader for the template and returns a :class:`Template`.
- If the `parent` parameter is not `None`, :meth:`join_path` is called
- to get the real template name before loading.
-
- The `globals` parameter can be used to provide template wide globals.
- These variables are available in the context at render time.
-
- If the template does not exist a :exc:`TemplateNotFound` exception is
- raised.
-
- .. versionchanged:: 2.4
- If `name` is a :class:`Template` object it is returned from the
- function unchanged.
- """
- if isinstance(name, Template):
- return name
- if parent is not None:
- name = self.join_path(name, parent)
- return self._load_template(name, self.make_globals(globals))
-
- @internalcode
- def select_template(self, names, parent=None, globals=None):
- """Works like :meth:`get_template` but tries a number of templates
- before it fails. If it cannot find any of the templates, it will
- raise a :exc:`TemplatesNotFound` exception.
-
+ return template
+ template = self.loader.load(self, name, globals)
+ if self.cache is not None:
+ self.cache[cache_key] = template
+ return template
+
+ @internalcode
+ def get_template(self, name, parent=None, globals=None):
+ """Load a template from the loader. If a loader is configured this
+ method asks the loader for the template and returns a :class:`Template`.
+ If the `parent` parameter is not `None`, :meth:`join_path` is called
+ to get the real template name before loading.
+
+ The `globals` parameter can be used to provide template wide globals.
+ These variables are available in the context at render time.
+
+ If the template does not exist a :exc:`TemplateNotFound` exception is
+ raised.
+
+ .. versionchanged:: 2.4
+ If `name` is a :class:`Template` object it is returned from the
+ function unchanged.
+ """
+ if isinstance(name, Template):
+ return name
+ if parent is not None:
+ name = self.join_path(name, parent)
+ return self._load_template(name, self.make_globals(globals))
+
+ @internalcode
+ def select_template(self, names, parent=None, globals=None):
+ """Works like :meth:`get_template` but tries a number of templates
+ before it fails. If it cannot find any of the templates, it will
+ raise a :exc:`TemplatesNotFound` exception.
+
.. versionchanged:: 2.11
If names is :class:`Undefined`, an :exc:`UndefinedError` is
raised instead. If no templates were found and names
contains :class:`Undefined`, the message is more helpful.
-
- .. versionchanged:: 2.4
- If `names` contains a :class:`Template` object it is returned
- from the function unchanged.
+
+ .. versionchanged:: 2.4
+ If `names` contains a :class:`Template` object it is returned
+ from the function unchanged.
.. versionadded:: 2.3
- """
+ """
if isinstance(names, Undefined):
names._fail_with_undefined_error()
- if not names:
+ if not names:
raise TemplatesNotFound(
message=u"Tried to select from an empty list " u"of templates."
)
- globals = self.make_globals(globals)
- for name in names:
- if isinstance(name, Template):
- return name
- if parent is not None:
- name = self.join_path(name, parent)
- try:
- return self._load_template(name, globals)
+ globals = self.make_globals(globals)
+ for name in names:
+ if isinstance(name, Template):
+ return name
+ if parent is not None:
+ name = self.join_path(name, parent)
+ try:
+ return self._load_template(name, globals)
except (TemplateNotFound, UndefinedError):
- pass
- raise TemplatesNotFound(names)
-
- @internalcode
+ pass
+ raise TemplatesNotFound(names)
+
+ @internalcode
def get_or_select_template(self, template_name_or_list, parent=None, globals=None):
- """Does a typecheck and dispatches to :meth:`select_template`
- if an iterable of template names is given, otherwise to
- :meth:`get_template`.
-
- .. versionadded:: 2.3
- """
+ """Does a typecheck and dispatches to :meth:`select_template`
+ if an iterable of template names is given, otherwise to
+ :meth:`get_template`.
+
+ .. versionadded:: 2.3
+ """
if isinstance(template_name_or_list, (string_types, Undefined)):
- return self.get_template(template_name_or_list, parent, globals)
- elif isinstance(template_name_or_list, Template):
- return template_name_or_list
- return self.select_template(template_name_or_list, parent, globals)
-
- def from_string(self, source, globals=None, template_class=None):
- """Load a template from a string. This parses the source given and
- returns a :class:`Template` object.
- """
- globals = self.make_globals(globals)
- cls = template_class or self.template_class
- return cls.from_code(self, self.compile(source), globals, None)
-
- def make_globals(self, d):
- """Return a dict for the globals."""
- if not d:
- return self.globals
- return dict(self.globals, **d)
-
-
-class Template(object):
- """The central template object. This class represents a compiled template
- and is used to evaluate it.
-
- Normally the template object is generated from an :class:`Environment` but
- it also has a constructor that makes it possible to create a template
- instance directly using the constructor. It takes the same arguments as
- the environment constructor but it's not possible to specify a loader.
-
- Every template object has a few methods and members that are guaranteed
- to exist. However it's important that a template object should be
- considered immutable. Modifications on the object are not supported.
-
- Template objects created from the constructor rather than an environment
- do have an `environment` attribute that points to a temporary environment
- that is probably shared with other templates created with the constructor
- and compatible settings.
-
- >>> template = Template('Hello {{ name }}!')
- >>> template.render(name='John Doe') == u'Hello John Doe!'
- True
- >>> stream = template.stream(name='John Doe')
- >>> next(stream) == u'Hello John Doe!'
- True
- >>> next(stream)
- Traceback (most recent call last):
- ...
- StopIteration
- """
-
+ return self.get_template(template_name_or_list, parent, globals)
+ elif isinstance(template_name_or_list, Template):
+ return template_name_or_list
+ return self.select_template(template_name_or_list, parent, globals)
+
+ def from_string(self, source, globals=None, template_class=None):
+ """Load a template from a string. This parses the source given and
+ returns a :class:`Template` object.
+ """
+ globals = self.make_globals(globals)
+ cls = template_class or self.template_class
+ return cls.from_code(self, self.compile(source), globals, None)
+
+ def make_globals(self, d):
+ """Return a dict for the globals."""
+ if not d:
+ return self.globals
+ return dict(self.globals, **d)
+
+
+class Template(object):
+ """The central template object. This class represents a compiled template
+ and is used to evaluate it.
+
+ Normally the template object is generated from an :class:`Environment` but
+ it also has a constructor that makes it possible to create a template
+ instance directly using the constructor. It takes the same arguments as
+ the environment constructor but it's not possible to specify a loader.
+
+ Every template object has a few methods and members that are guaranteed
+ to exist. However it's important that a template object should be
+ considered immutable. Modifications on the object are not supported.
+
+ Template objects created from the constructor rather than an environment
+ do have an `environment` attribute that points to a temporary environment
+ that is probably shared with other templates created with the constructor
+ and compatible settings.
+
+ >>> template = Template('Hello {{ name }}!')
+ >>> template.render(name='John Doe') == u'Hello John Doe!'
+ True
+ >>> stream = template.stream(name='John Doe')
+ >>> next(stream) == u'Hello John Doe!'
+ True
+ >>> next(stream)
+ Traceback (most recent call last):
+ ...
+ StopIteration
+ """
+
#: Type of environment to create when creating a template directly
#: rather than through an existing environment.
environment_class = Environment
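
Reviewer note: the from_string/make_globals pair shown a little earlier in this hunk layers per-template globals over Environment.globals. A minimal sketch of that behaviour, illustrative only and not part of the patch (the names site and page are arbitrary):

    from jinja2 import Environment

    env = Environment()
    env.globals["site"] = "example"  # environment-wide global

    # make_globals() merges the per-template dict over env.globals,
    # so both names resolve at render time.
    t = env.from_string("{{ site }} / {{ page }}", globals={"page": "index"})
    assert t.render() == u"example / index"
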
@@ -1003,7 +1003,7 @@ class Template(object):
autoescape=False,
enable_async=False,
):
- env = get_spontaneous_environment(
+ env = get_spontaneous_environment(
cls.environment_class,
block_start_string,
block_end_string,
@@ -1028,335 +1028,335 @@ class Template(object):
None,
enable_async,
)
- return env.from_string(source, template_class=cls)
-
- @classmethod
- def from_code(cls, environment, code, globals, uptodate=None):
- """Creates a template object from compiled code and the globals. This
- is used by the loaders and environment to create a template object.
- """
+ return env.from_string(source, template_class=cls)
+
+ @classmethod
+ def from_code(cls, environment, code, globals, uptodate=None):
+ """Creates a template object from compiled code and the globals. This
+ is used by the loaders and environment to create a template object.
+ """
namespace = {"environment": environment, "__file__": code.co_filename}
- exec(code, namespace)
- rv = cls._from_namespace(environment, namespace, globals)
- rv._uptodate = uptodate
- return rv
-
- @classmethod
- def from_module_dict(cls, environment, module_dict, globals):
- """Creates a template object from a module. This is used by the
- module loader to create a template object.
-
- .. versionadded:: 2.4
- """
- return cls._from_namespace(environment, module_dict, globals)
-
- @classmethod
- def _from_namespace(cls, environment, namespace, globals):
- t = object.__new__(cls)
- t.environment = environment
- t.globals = globals
+ exec(code, namespace)
+ rv = cls._from_namespace(environment, namespace, globals)
+ rv._uptodate = uptodate
+ return rv
+
+ @classmethod
+ def from_module_dict(cls, environment, module_dict, globals):
+ """Creates a template object from a module. This is used by the
+ module loader to create a template object.
+
+ .. versionadded:: 2.4
+ """
+ return cls._from_namespace(environment, module_dict, globals)
+
+ @classmethod
+ def _from_namespace(cls, environment, namespace, globals):
+ t = object.__new__(cls)
+ t.environment = environment
+ t.globals = globals
t.name = namespace["name"]
t.filename = namespace["__file__"]
t.blocks = namespace["blocks"]
-
- # render function and module
+
+ # render function and module
t.root_render_func = namespace["root"]
- t._module = None
-
- # debug and loader helpers
+ t._module = None
+
+ # debug and loader helpers
t._debug_info = namespace["debug_info"]
- t._uptodate = None
-
- # store the reference
+ t._uptodate = None
+
+ # store the reference
namespace["environment"] = environment
namespace["__jinja_template__"] = t
-
- return t
-
- def render(self, *args, **kwargs):
- """This method accepts the same arguments as the `dict` constructor:
- A dict, a dict subclass or some keyword arguments. If no arguments
- are given the context will be empty. These two calls do the same::
-
- template.render(knights='that say nih')
- template.render({'knights': 'that say nih'})
-
- This will return the rendered template as unicode string.
- """
- vars = dict(*args, **kwargs)
- try:
- return concat(self.root_render_func(self.new_context(vars)))
- except Exception:
+
+ return t
+
+ def render(self, *args, **kwargs):
+ """This method accepts the same arguments as the `dict` constructor:
+ A dict, a dict subclass or some keyword arguments. If no arguments
+ are given the context will be empty. These two calls do the same::
+
+ template.render(knights='that say nih')
+ template.render({'knights': 'that say nih'})
+
+ This will return the rendered template as unicode string.
+ """
+ vars = dict(*args, **kwargs)
+ try:
+ return concat(self.root_render_func(self.new_context(vars)))
+ except Exception:
self.environment.handle_exception()
-
- def render_async(self, *args, **kwargs):
- """This works similar to :meth:`render` but returns a coroutine
- that when awaited returns the entire rendered template string. This
- requires the async feature to be enabled.
-
- Example usage::
-
- await template.render_async(knights='that say nih; asynchronously')
- """
- # see asyncsupport for the actual implementation
+
+ def render_async(self, *args, **kwargs):
+ """This works similar to :meth:`render` but returns a coroutine
+ that when awaited returns the entire rendered template string. This
+ requires the async feature to be enabled.
+
+ Example usage::
+
+ await template.render_async(knights='that say nih; asynchronously')
+ """
+ # see asyncsupport for the actual implementation
raise NotImplementedError(
"This feature is not available for this version of Python"
)
-
- def stream(self, *args, **kwargs):
- """Works exactly like :meth:`generate` but returns a
- :class:`TemplateStream`.
- """
- return TemplateStream(self.generate(*args, **kwargs))
-
- def generate(self, *args, **kwargs):
- """For very large templates it can be useful to not render the whole
- template at once but evaluate each statement after another and yield
- piece for piece. This method basically does exactly that and returns
- a generator that yields one item after another as unicode strings.
-
- It accepts the same arguments as :meth:`render`.
- """
- vars = dict(*args, **kwargs)
- try:
- for event in self.root_render_func(self.new_context(vars)):
- yield event
- except Exception:
+
+ def stream(self, *args, **kwargs):
+ """Works exactly like :meth:`generate` but returns a
+ :class:`TemplateStream`.
+ """
+ return TemplateStream(self.generate(*args, **kwargs))
+
+ def generate(self, *args, **kwargs):
+ """For very large templates it can be useful to not render the whole
+ template at once but evaluate each statement after another and yield
+ piece for piece. This method basically does exactly that and returns
+ a generator that yields one item after another as unicode strings.
+
+ It accepts the same arguments as :meth:`render`.
+ """
+ vars = dict(*args, **kwargs)
+ try:
+ for event in self.root_render_func(self.new_context(vars)):
+ yield event
+ except Exception:
yield self.environment.handle_exception()
-
- def generate_async(self, *args, **kwargs):
- """An async version of :meth:`generate`. Works very similarly but
- returns an async iterator instead.
- """
- # see asyncsupport for the actual implementation
+
+ def generate_async(self, *args, **kwargs):
+ """An async version of :meth:`generate`. Works very similarly but
+ returns an async iterator instead.
+ """
+ # see asyncsupport for the actual implementation
raise NotImplementedError(
"This feature is not available for this version of Python"
)
-
- def new_context(self, vars=None, shared=False, locals=None):
- """Create a new :class:`Context` for this template. The vars
- provided will be passed to the template. Per default the globals
- are added to the context. If shared is set to `True` the data
+
+ def new_context(self, vars=None, shared=False, locals=None):
+ """Create a new :class:`Context` for this template. The vars
+ provided will be passed to the template. Per default the globals
+ are added to the context. If shared is set to `True` the data
is passed as is to the context without adding the globals.
-
- `locals` can be a dict of local variables for internal usage.
- """
+
+ `locals` can be a dict of local variables for internal usage.
+ """
return new_context(
self.environment, self.name, self.blocks, vars, shared, self.globals, locals
)
-
- def make_module(self, vars=None, shared=False, locals=None):
- """This method works like the :attr:`module` attribute when called
- without arguments but it will evaluate the template on every call
- rather than caching it. It's also possible to provide
- a dict which is then used as context. The arguments are the same
- as for the :meth:`new_context` method.
- """
- return TemplateModule(self, self.new_context(vars, shared, locals))
-
- def make_module_async(self, vars=None, shared=False, locals=None):
- """As template module creation can invoke template code for
+
+ def make_module(self, vars=None, shared=False, locals=None):
+ """This method works like the :attr:`module` attribute when called
+ without arguments but it will evaluate the template on every call
+ rather than caching it. It's also possible to provide
+ a dict which is then used as context. The arguments are the same
+ as for the :meth:`new_context` method.
+ """
+ return TemplateModule(self, self.new_context(vars, shared, locals))
+
+ def make_module_async(self, vars=None, shared=False, locals=None):
+ """As template module creation can invoke template code for
asynchronous executions this method must be used instead of the
- normal :meth:`make_module` one. Likewise the module attribute
- becomes unavailable in async mode.
- """
- # see asyncsupport for the actual implementation
+ normal :meth:`make_module` one. Likewise the module attribute
+ becomes unavailable in async mode.
+ """
+ # see asyncsupport for the actual implementation
raise NotImplementedError(
"This feature is not available for this version of Python"
)
-
- @internalcode
- def _get_default_module(self):
- if self._module is not None:
- return self._module
- self._module = rv = self.make_module()
- return rv
-
- @property
- def module(self):
- """The template as module. This is used for imports in the
- template runtime but is also useful if one wants to access
- exported template variables from the Python layer:
-
- >>> t = Template('{% macro foo() %}42{% endmacro %}23')
- >>> str(t.module)
- '23'
- >>> t.module.foo() == u'42'
- True
-
- This attribute is not available if async mode is enabled.
- """
- return self._get_default_module()
-
- def get_corresponding_lineno(self, lineno):
- """Return the source line number of a line number in the
- generated bytecode as they are not in sync.
- """
- for template_line, code_line in reversed(self.debug_info):
- if code_line <= lineno:
- return template_line
- return 1
-
- @property
- def is_up_to_date(self):
- """If this variable is `False` there is a newer version available."""
- if self._uptodate is None:
- return True
- return self._uptodate()
-
- @property
- def debug_info(self):
- """The debug info mapping."""
+
+ @internalcode
+ def _get_default_module(self):
+ if self._module is not None:
+ return self._module
+ self._module = rv = self.make_module()
+ return rv
+
+ @property
+ def module(self):
+ """The template as module. This is used for imports in the
+ template runtime but is also useful if one wants to access
+ exported template variables from the Python layer:
+
+ >>> t = Template('{% macro foo() %}42{% endmacro %}23')
+ >>> str(t.module)
+ '23'
+ >>> t.module.foo() == u'42'
+ True
+
+ This attribute is not available if async mode is enabled.
+ """
+ return self._get_default_module()
+
+ def get_corresponding_lineno(self, lineno):
+ """Return the source line number of a line number in the
+ generated bytecode as they are not in sync.
+ """
+ for template_line, code_line in reversed(self.debug_info):
+ if code_line <= lineno:
+ return template_line
+ return 1
+
+ @property
+ def is_up_to_date(self):
+ """If this variable is `False` there is a newer version available."""
+ if self._uptodate is None:
+ return True
+ return self._uptodate()
+
+ @property
+ def debug_info(self):
+ """The debug info mapping."""
if self._debug_info:
return [tuple(map(int, x.split("="))) for x in self._debug_info.split("&")]
return []
-
- def __repr__(self):
- if self.name is None:
+
+ def __repr__(self):
+ if self.name is None:
name = "memory:%x" % id(self)
- else:
- name = repr(self.name)
+ else:
+ name = repr(self.name)
return "<%s %s>" % (self.__class__.__name__, name)
-
-
-@implements_to_string
-class TemplateModule(object):
- """Represents an imported template. All the exported names of the
- template are available as attributes on this object. Additionally
- converting it into an unicode- or bytestrings renders the contents.
- """
-
- def __init__(self, template, context, body_stream=None):
- if body_stream is None:
- if context.environment.is_async:
+
+
+@implements_to_string
+class TemplateModule(object):
+ """Represents an imported template. All the exported names of the
+ template are available as attributes on this object. Additionally
+ converting it into an unicode- or bytestrings renders the contents.
+ """
+
+ def __init__(self, template, context, body_stream=None):
+ if body_stream is None:
+ if context.environment.is_async:
raise RuntimeError(
"Async mode requires a body stream "
"to be passed to a template module. Use "
"the async methods of the API you are "
"using."
)
- body_stream = list(template.root_render_func(context))
- self._body_stream = body_stream
- self.__dict__.update(context.get_exported())
- self.__name__ = template.name
-
- def __html__(self):
- return Markup(concat(self._body_stream))
-
- def __str__(self):
- return concat(self._body_stream)
-
- def __repr__(self):
- if self.__name__ is None:
+ body_stream = list(template.root_render_func(context))
+ self._body_stream = body_stream
+ self.__dict__.update(context.get_exported())
+ self.__name__ = template.name
+
+ def __html__(self):
+ return Markup(concat(self._body_stream))
+
+ def __str__(self):
+ return concat(self._body_stream)
+
+ def __repr__(self):
+ if self.__name__ is None:
name = "memory:%x" % id(self)
- else:
- name = repr(self.__name__)
+ else:
+ name = repr(self.__name__)
return "<%s %s>" % (self.__class__.__name__, name)
-
-
-class TemplateExpression(object):
- """The :meth:`jinja2.Environment.compile_expression` method returns an
- instance of this object. It encapsulates the expression-like access
- to the template with an expression it wraps.
- """
-
- def __init__(self, template, undefined_to_none):
- self._template = template
- self._undefined_to_none = undefined_to_none
-
- def __call__(self, *args, **kwargs):
- context = self._template.new_context(dict(*args, **kwargs))
- consume(self._template.root_render_func(context))
+
+
+class TemplateExpression(object):
+ """The :meth:`jinja2.Environment.compile_expression` method returns an
+ instance of this object. It encapsulates the expression-like access
+ to the template with an expression it wraps.
+ """
+
+ def __init__(self, template, undefined_to_none):
+ self._template = template
+ self._undefined_to_none = undefined_to_none
+
+ def __call__(self, *args, **kwargs):
+ context = self._template.new_context(dict(*args, **kwargs))
+ consume(self._template.root_render_func(context))
rv = context.vars["result"]
- if self._undefined_to_none and isinstance(rv, Undefined):
- rv = None
- return rv
-
-
-@implements_iterator
-class TemplateStream(object):
- """A template stream works pretty much like an ordinary python generator
- but it can buffer multiple items to reduce the number of total iterations.
- Per default the output is unbuffered which means that for every unbuffered
- instruction in the template one unicode string is yielded.
-
- If buffering is enabled with a buffer size of 5, five items are combined
- into a new unicode string. This is mainly useful if you are streaming
- big templates to a client via WSGI which flushes after each iteration.
- """
-
- def __init__(self, gen):
- self._gen = gen
- self.disable_buffering()
-
+ if self._undefined_to_none and isinstance(rv, Undefined):
+ rv = None
+ return rv
+
+
+@implements_iterator
+class TemplateStream(object):
+ """A template stream works pretty much like an ordinary python generator
+ but it can buffer multiple items to reduce the number of total iterations.
+ Per default the output is unbuffered which means that for every unbuffered
+ instruction in the template one unicode string is yielded.
+
+ If buffering is enabled with a buffer size of 5, five items are combined
+ into a new unicode string. This is mainly useful if you are streaming
+ big templates to a client via WSGI which flushes after each iteration.
+ """
+
+ def __init__(self, gen):
+ self._gen = gen
+ self.disable_buffering()
+
def dump(self, fp, encoding=None, errors="strict"):
- """Dump the complete stream into a file or file-like object.
- Per default unicode strings are written, if you want to encode
- before writing specify an `encoding`.
-
- Example usage::
-
- Template('Hello {{ name }}!').stream(name='foo').dump('hello.html')
- """
- close = False
- if isinstance(fp, string_types):
- if encoding is None:
+ """Dump the complete stream into a file or file-like object.
+ Per default unicode strings are written, if you want to encode
+ before writing specify an `encoding`.
+
+ Example usage::
+
+ Template('Hello {{ name }}!').stream(name='foo').dump('hello.html')
+ """
+ close = False
+ if isinstance(fp, string_types):
+ if encoding is None:
encoding = "utf-8"
fp = open(fp, "wb")
- close = True
- try:
- if encoding is not None:
- iterable = (x.encode(encoding, errors) for x in self)
- else:
- iterable = self
+ close = True
+ try:
+ if encoding is not None:
+ iterable = (x.encode(encoding, errors) for x in self)
+ else:
+ iterable = self
if hasattr(fp, "writelines"):
- fp.writelines(iterable)
- else:
- for item in iterable:
- fp.write(item)
- finally:
- if close:
- fp.close()
-
- def disable_buffering(self):
- """Disable the output buffering."""
- self._next = partial(next, self._gen)
- self.buffered = False
-
- def _buffered_generator(self, size):
- buf = []
- c_size = 0
- push = buf.append
-
- while 1:
- try:
- while c_size < size:
- c = next(self._gen)
- push(c)
- if c:
- c_size += 1
- except StopIteration:
- if not c_size:
- return
- yield concat(buf)
- del buf[:]
- c_size = 0
-
- def enable_buffering(self, size=5):
- """Enable buffering. Buffer `size` items before yielding them."""
- if size <= 1:
+ fp.writelines(iterable)
+ else:
+ for item in iterable:
+ fp.write(item)
+ finally:
+ if close:
+ fp.close()
+
+ def disable_buffering(self):
+ """Disable the output buffering."""
+ self._next = partial(next, self._gen)
+ self.buffered = False
+
+ def _buffered_generator(self, size):
+ buf = []
+ c_size = 0
+ push = buf.append
+
+ while 1:
+ try:
+ while c_size < size:
+ c = next(self._gen)
+ push(c)
+ if c:
+ c_size += 1
+ except StopIteration:
+ if not c_size:
+ return
+ yield concat(buf)
+ del buf[:]
+ c_size = 0
+
+ def enable_buffering(self, size=5):
+ """Enable buffering. Buffer `size` items before yielding them."""
+ if size <= 1:
raise ValueError("buffer size too small")
-
- self.buffered = True
- self._next = partial(next, self._buffered_generator(size))
-
- def __iter__(self):
- return self
-
- def __next__(self):
- return self._next()
-
-
-# hook in default template class. if anyone reads this comment: ignore that
-# it's possible to use custom templates ;-)
-Environment.template_class = Template
+
+ self.buffered = True
+ self._next = partial(next, self._buffered_generator(size))
+
+ def __iter__(self):
+ return self
+
+ def __next__(self):
+ return self._next()
+
+
+# hook in default template class. if anyone reads this comment: ignore that
+# it's possible to use custom templates ;-)
+Environment.template_class = Template
diff --git a/contrib/python/Jinja2/py2/jinja2/exceptions.py b/contrib/python/Jinja2/py2/jinja2/exceptions.py
index 0bf2003e30..d8b8456be7 100644
--- a/contrib/python/Jinja2/py2/jinja2/exceptions.py
+++ b/contrib/python/Jinja2/py2/jinja2/exceptions.py
@@ -1,89 +1,89 @@
-# -*- coding: utf-8 -*-
+# -*- coding: utf-8 -*-
from ._compat import imap
from ._compat import implements_to_string
from ._compat import PY2
from ._compat import text_type
-
-
-class TemplateError(Exception):
- """Baseclass for all template errors."""
-
- if PY2:
-
- def __init__(self, message=None):
- if message is not None:
+
+
+class TemplateError(Exception):
+ """Baseclass for all template errors."""
+
+ if PY2:
+
+ def __init__(self, message=None):
+ if message is not None:
message = text_type(message).encode("utf-8")
- Exception.__init__(self, message)
-
- @property
- def message(self):
- if self.args:
- message = self.args[0]
- if message is not None:
+ Exception.__init__(self, message)
+
+ @property
+ def message(self):
+ if self.args:
+ message = self.args[0]
+ if message is not None:
return message.decode("utf-8", "replace")
-
- def __unicode__(self):
+
+ def __unicode__(self):
return self.message or u""
- else:
-
- def __init__(self, message=None):
- Exception.__init__(self, message)
-
- @property
- def message(self):
- if self.args:
- message = self.args[0]
- if message is not None:
- return message
-
-
-@implements_to_string
-class TemplateNotFound(IOError, LookupError, TemplateError):
+ else:
+
+ def __init__(self, message=None):
+ Exception.__init__(self, message)
+
+ @property
+ def message(self):
+ if self.args:
+ message = self.args[0]
+ if message is not None:
+ return message
+
+
+@implements_to_string
+class TemplateNotFound(IOError, LookupError, TemplateError):
"""Raised if a template does not exist.
-
+
.. versionchanged:: 2.11
If the given name is :class:`Undefined` and no message was
provided, an :exc:`UndefinedError` is raised.
"""
- # looks weird, but removes the warning descriptor that just
- # bogusly warns us about message being deprecated
- message = None
-
- def __init__(self, name, message=None):
+ # looks weird, but removes the warning descriptor that just
+ # bogusly warns us about message being deprecated
+ message = None
+
+ def __init__(self, name, message=None):
IOError.__init__(self, name)
- if message is None:
+ if message is None:
from .runtime import Undefined
if isinstance(name, Undefined):
name._fail_with_undefined_error()
- message = name
-
- self.message = message
- self.name = name
- self.templates = [name]
-
- def __str__(self):
- return self.message
-
-
-class TemplatesNotFound(TemplateNotFound):
- """Like :class:`TemplateNotFound` but raised if multiple templates
- are selected. This is a subclass of :class:`TemplateNotFound`
- exception, so just catching the base exception will catch both.
-
+ message = name
+
+ self.message = message
+ self.name = name
+ self.templates = [name]
+
+ def __str__(self):
+ return self.message
+
+
+class TemplatesNotFound(TemplateNotFound):
+ """Like :class:`TemplateNotFound` but raised if multiple templates
+ are selected. This is a subclass of :class:`TemplateNotFound`
+ exception, so just catching the base exception will catch both.
+
.. versionchanged:: 2.11
If a name in the list of names is :class:`Undefined`, a message
about it being undefined is shown rather than the empty string.
- .. versionadded:: 2.2
- """
-
- def __init__(self, names=(), message=None):
- if message is None:
+ .. versionadded:: 2.2
+ """
+
+ def __init__(self, names=(), message=None):
+ if message is None:
from .runtime import Undefined
parts = []
@@ -97,81 +97,81 @@ class TemplatesNotFound(TemplateNotFound):
message = u"none of the templates given were found: " + u", ".join(
imap(text_type, parts)
)
- TemplateNotFound.__init__(self, names and names[-1] or None, message)
- self.templates = list(names)
-
-
-@implements_to_string
-class TemplateSyntaxError(TemplateError):
- """Raised to tell the user that there is a problem with the template."""
-
- def __init__(self, message, lineno, name=None, filename=None):
- TemplateError.__init__(self, message)
- self.lineno = lineno
- self.name = name
- self.filename = filename
- self.source = None
-
- # this is set to True if the debug.translate_syntax_error
- # function translated the syntax error into a new traceback
- self.translated = False
-
- def __str__(self):
- # for translated errors we only return the message
- if self.translated:
- return self.message
-
- # otherwise attach some stuff
+ TemplateNotFound.__init__(self, names and names[-1] or None, message)
+ self.templates = list(names)
+
+
+@implements_to_string
+class TemplateSyntaxError(TemplateError):
+ """Raised to tell the user that there is a problem with the template."""
+
+ def __init__(self, message, lineno, name=None, filename=None):
+ TemplateError.__init__(self, message)
+ self.lineno = lineno
+ self.name = name
+ self.filename = filename
+ self.source = None
+
+ # this is set to True if the debug.translate_syntax_error
+ # function translated the syntax error into a new traceback
+ self.translated = False
+
+ def __str__(self):
+ # for translated errors we only return the message
+ if self.translated:
+ return self.message
+
+ # otherwise attach some stuff
location = "line %d" % self.lineno
- name = self.filename or self.name
- if name:
- location = 'File "%s", %s' % (name, location)
+ name = self.filename or self.name
+ if name:
+ location = 'File "%s", %s' % (name, location)
lines = [self.message, " " + location]
-
- # if the source is set, add the line to the output
- if self.source is not None:
- try:
- line = self.source.splitlines()[self.lineno - 1]
- except IndexError:
- line = None
- if line:
+
+ # if the source is set, add the line to the output
+ if self.source is not None:
+ try:
+ line = self.source.splitlines()[self.lineno - 1]
+ except IndexError:
+ line = None
+ if line:
lines.append(" " + line.strip())
-
+
return u"\n".join(lines)
-
+
def __reduce__(self):
# https://bugs.python.org/issue1692335 Exceptions that take
# multiple required arguments have problems with pickling.
# Without this, raises TypeError: __init__() missing 1 required
# positional argument: 'lineno'
return self.__class__, (self.message, self.lineno, self.name, self.filename)
-
-
-class TemplateAssertionError(TemplateSyntaxError):
- """Like a template syntax error, but covers cases where something in the
- template caused an error at compile time that wasn't necessarily caused
- by a syntax error. However it's a direct subclass of
- :exc:`TemplateSyntaxError` and has the same attributes.
- """
-
-
-class TemplateRuntimeError(TemplateError):
- """A generic runtime error in the template engine. Under some situations
- Jinja may raise this exception.
- """
-
-
-class UndefinedError(TemplateRuntimeError):
- """Raised if a template tries to operate on :class:`Undefined`."""
-
-
-class SecurityError(TemplateRuntimeError):
- """Raised if a template tries to do something insecure if the
- sandbox is enabled.
- """
-
-
-class FilterArgumentError(TemplateRuntimeError):
- """This error is raised if a filter was called with inappropriate
- arguments
- """
+
+
+class TemplateAssertionError(TemplateSyntaxError):
+ """Like a template syntax error, but covers cases where something in the
+ template caused an error at compile time that wasn't necessarily caused
+ by a syntax error. However it's a direct subclass of
+ :exc:`TemplateSyntaxError` and has the same attributes.
+ """
+
+
+class TemplateRuntimeError(TemplateError):
+ """A generic runtime error in the template engine. Under some situations
+ Jinja may raise this exception.
+ """
+
+
+class UndefinedError(TemplateRuntimeError):
+ """Raised if a template tries to operate on :class:`Undefined`."""
+
+
+class SecurityError(TemplateRuntimeError):
+ """Raised if a template tries to do something insecure if the
+ sandbox is enabled.
+ """
+
+
+class FilterArgumentError(TemplateRuntimeError):
+ """This error is raised if a filter was called with inappropriate
+ arguments
+ """
diff --git a/contrib/python/Jinja2/py2/jinja2/ext.py b/contrib/python/Jinja2/py2/jinja2/ext.py
index 9141be4dac..012093f2ed 100644
--- a/contrib/python/Jinja2/py2/jinja2/ext.py
+++ b/contrib/python/Jinja2/py2/jinja2/ext.py
@@ -1,11 +1,11 @@
-# -*- coding: utf-8 -*-
+# -*- coding: utf-8 -*-
"""Extension API for adding custom tags and behavior."""
import pprint
-import re
+import re
from sys import version_info
-
+
from markupsafe import Markup
-
+
from . import nodes
from ._compat import iteritems
from ._compat import string_types
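
Reviewer note: most of the ext.py hunks below concern the i18n extension. A small wiring sketch, illustrative only and not part of the patch (the template string and the user variable are arbitrary; null translations stand in for a real gettext catalog):

    from jinja2 import Environment

    env = Environment(extensions=["jinja2.ext.i18n"])

    # install_null_translations is attached by the extension's __init__ via
    # environment.extend(); newstyle gettext applies the block variables as a
    # format mapping, as in _make_new_gettext below.
    env.install_null_translations(newstyle=True)

    t = env.from_string("{% trans name=user %}Hello {{ name }}!{% endtrans %}")
    assert t.render(user="World") == u"Hello World!"
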
@@ -29,106 +29,106 @@ from .nodes import ContextReference
from .runtime import concat
from .utils import contextfunction
from .utils import import_string
-
-# the only real useful gettext functions for a Jinja template. Note
-# that ugettext must be assigned to gettext as Jinja doesn't support
-# non unicode strings.
+
+# the only real useful gettext functions for a Jinja template. Note
+# that ugettext must be assigned to gettext as Jinja doesn't support
+# non unicode strings.
GETTEXT_FUNCTIONS = ("_", "gettext", "ngettext")
-
+
_ws_re = re.compile(r"\s*\n\s*")
+
-
-class ExtensionRegistry(type):
- """Gives the extension an unique identifier."""
-
+class ExtensionRegistry(type):
+ """Gives the extension an unique identifier."""
+
def __new__(mcs, name, bases, d):
rv = type.__new__(mcs, name, bases, d)
rv.identifier = rv.__module__ + "." + rv.__name__
- return rv
-
-
-class Extension(with_metaclass(ExtensionRegistry, object)):
- """Extensions can be used to add extra functionality to the Jinja template
- system at the parser level. Custom extensions are bound to an environment
- but may not store environment specific data on `self`. The reason for
- this is that an extension can be bound to another environment (for
- overlays) by creating a copy and reassigning the `environment` attribute.
-
- As extensions are created by the environment they cannot accept any
- arguments for configuration. One may want to work around that by using
- a factory function, but that is not possible as extensions are identified
- by their import name. The correct way to configure the extension is
- storing the configuration values on the environment. Because this way the
- environment ends up acting as central configuration storage the
- attributes may clash which is why extensions have to ensure that the names
- they choose for configuration are not too generic. ``prefix`` for example
- is a terrible name, ``fragment_cache_prefix`` on the other hand is a good
- name as includes the name of the extension (fragment cache).
- """
-
- #: if this extension parses this is the list of tags it's listening to.
- tags = set()
-
- #: the priority of that extension. This is especially useful for
- #: extensions that preprocess values. A lower value means higher
- #: priority.
- #:
- #: .. versionadded:: 2.4
- priority = 100
-
- def __init__(self, environment):
- self.environment = environment
-
- def bind(self, environment):
- """Create a copy of this extension bound to another environment."""
- rv = object.__new__(self.__class__)
- rv.__dict__.update(self.__dict__)
- rv.environment = environment
- return rv
-
- def preprocess(self, source, name, filename=None):
- """This method is called before the actual lexing and can be used to
- preprocess the source. The `filename` is optional. The return value
- must be the preprocessed source.
- """
- return source
-
- def filter_stream(self, stream):
- """It's passed a :class:`~jinja2.lexer.TokenStream` that can be used
- to filter tokens returned. This method has to return an iterable of
- :class:`~jinja2.lexer.Token`\\s, but it doesn't have to return a
- :class:`~jinja2.lexer.TokenStream`.
- """
- return stream
-
- def parse(self, parser):
- """If any of the :attr:`tags` matched this method is called with the
- parser as first argument. The token the parser stream is pointing at
- is the name token that matched. This method has to return one or a
- list of multiple nodes.
- """
- raise NotImplementedError()
-
- def attr(self, name, lineno=None):
- """Return an attribute node for the current extension. This is useful
- to pass constants on extensions to generated template code.
-
- ::
-
- self.attr('_my_attribute', lineno=lineno)
- """
- return nodes.ExtensionAttribute(self.identifier, name, lineno=lineno)
-
+ return rv
+
+
+class Extension(with_metaclass(ExtensionRegistry, object)):
+ """Extensions can be used to add extra functionality to the Jinja template
+ system at the parser level. Custom extensions are bound to an environment
+ but may not store environment specific data on `self`. The reason for
+ this is that an extension can be bound to another environment (for
+ overlays) by creating a copy and reassigning the `environment` attribute.
+
+ As extensions are created by the environment they cannot accept any
+ arguments for configuration. One may want to work around that by using
+ a factory function, but that is not possible as extensions are identified
+ by their import name. The correct way to configure the extension is
+ storing the configuration values on the environment. Because this way the
+ environment ends up acting as central configuration storage the
+ attributes may clash which is why extensions have to ensure that the names
+ they choose for configuration are not too generic. ``prefix`` for example
+ is a terrible name, ``fragment_cache_prefix`` on the other hand is a good
+ name as includes the name of the extension (fragment cache).
+ """
+
+ #: if this extension parses this is the list of tags it's listening to.
+ tags = set()
+
+ #: the priority of that extension. This is especially useful for
+ #: extensions that preprocess values. A lower value means higher
+ #: priority.
+ #:
+ #: .. versionadded:: 2.4
+ priority = 100
+
+ def __init__(self, environment):
+ self.environment = environment
+
+ def bind(self, environment):
+ """Create a copy of this extension bound to another environment."""
+ rv = object.__new__(self.__class__)
+ rv.__dict__.update(self.__dict__)
+ rv.environment = environment
+ return rv
+
+ def preprocess(self, source, name, filename=None):
+ """This method is called before the actual lexing and can be used to
+ preprocess the source. The `filename` is optional. The return value
+ must be the preprocessed source.
+ """
+ return source
+
+ def filter_stream(self, stream):
+ """It's passed a :class:`~jinja2.lexer.TokenStream` that can be used
+ to filter tokens returned. This method has to return an iterable of
+ :class:`~jinja2.lexer.Token`\\s, but it doesn't have to return a
+ :class:`~jinja2.lexer.TokenStream`.
+ """
+ return stream
+
+ def parse(self, parser):
+ """If any of the :attr:`tags` matched this method is called with the
+ parser as first argument. The token the parser stream is pointing at
+ is the name token that matched. This method has to return one or a
+ list of multiple nodes.
+ """
+ raise NotImplementedError()
+
+ def attr(self, name, lineno=None):
+ """Return an attribute node for the current extension. This is useful
+ to pass constants on extensions to generated template code.
+
+ ::
+
+ self.attr('_my_attribute', lineno=lineno)
+ """
+ return nodes.ExtensionAttribute(self.identifier, name, lineno=lineno)
+
def call_method(
self, name, args=None, kwargs=None, dyn_args=None, dyn_kwargs=None, lineno=None
):
- """Call a method of the extension. This is a shortcut for
- :meth:`attr` + :class:`jinja2.nodes.Call`.
- """
- if args is None:
- args = []
- if kwargs is None:
- kwargs = []
+ """Call a method of the extension. This is a shortcut for
+ :meth:`attr` + :class:`jinja2.nodes.Call`.
+ """
+ if args is None:
+ args = []
+ if kwargs is None:
+ kwargs = []
return nodes.Call(
self.attr(name, lineno=lineno),
args,
@@ -137,196 +137,196 @@ class Extension(with_metaclass(ExtensionRegistry, object)):
dyn_kwargs,
lineno=lineno,
)
-
-
-@contextfunction
-def _gettext_alias(__context, *args, **kwargs):
+
+
+@contextfunction
+def _gettext_alias(__context, *args, **kwargs):
return __context.call(__context.resolve("gettext"), *args, **kwargs)
-
-
-def _make_new_gettext(func):
- @contextfunction
- def gettext(__context, __string, **variables):
- rv = __context.call(func, __string)
- if __context.eval_ctx.autoescape:
- rv = Markup(rv)
+
+
+def _make_new_gettext(func):
+ @contextfunction
+ def gettext(__context, __string, **variables):
+ rv = __context.call(func, __string)
+ if __context.eval_ctx.autoescape:
+ rv = Markup(rv)
# Always treat as a format string, even if there are no
# variables. This makes translation strings more consistent
# and predictable. This requires escaping
- return rv % variables
-
- return gettext
-
-
-def _make_new_ngettext(func):
- @contextfunction
- def ngettext(__context, __singular, __plural, __num, **variables):
+ return rv % variables
+
+ return gettext
+
+
+def _make_new_ngettext(func):
+ @contextfunction
+ def ngettext(__context, __singular, __plural, __num, **variables):
variables.setdefault("num", __num)
- rv = __context.call(func, __singular, __plural, __num)
- if __context.eval_ctx.autoescape:
- rv = Markup(rv)
+ rv = __context.call(func, __singular, __plural, __num)
+ if __context.eval_ctx.autoescape:
+ rv = Markup(rv)
# Always treat as a format string, see gettext comment above.
- return rv % variables
-
- return ngettext
+ return rv % variables
-
-class InternationalizationExtension(Extension):
+ return ngettext
+
+
+class InternationalizationExtension(Extension):
"""This extension adds gettext support to Jinja."""
-
+
tags = {"trans"}
- # TODO: the i18n extension is currently reevaluating values in a few
- # situations. Take this example:
- # {% trans count=something() %}{{ count }} foo{% pluralize
- # %}{{ count }} fooss{% endtrans %}
- # something is called twice here. One time for the gettext value and
- # the other time for the n-parameter of the ngettext function.
-
- def __init__(self, environment):
- Extension.__init__(self, environment)
+ # TODO: the i18n extension is currently reevaluating values in a few
+ # situations. Take this example:
+ # {% trans count=something() %}{{ count }} foo{% pluralize
+ # %}{{ count }} fooss{% endtrans %}
+ # something is called twice here. One time for the gettext value and
+ # the other time for the n-parameter of the ngettext function.
+
+ def __init__(self, environment):
+ Extension.__init__(self, environment)
environment.globals["_"] = _gettext_alias
- environment.extend(
- install_gettext_translations=self._install,
- install_null_translations=self._install_null,
- install_gettext_callables=self._install_callables,
- uninstall_gettext_translations=self._uninstall,
- extract_translations=self._extract,
+ environment.extend(
+ install_gettext_translations=self._install,
+ install_null_translations=self._install_null,
+ install_gettext_callables=self._install_callables,
+ uninstall_gettext_translations=self._uninstall,
+ extract_translations=self._extract,
newstyle_gettext=False,
- )
-
- def _install(self, translations, newstyle=None):
+ )
+
+ def _install(self, translations, newstyle=None):
gettext = getattr(translations, "ugettext", None)
- if gettext is None:
- gettext = translations.gettext
+ if gettext is None:
+ gettext = translations.gettext
ngettext = getattr(translations, "ungettext", None)
- if ngettext is None:
- ngettext = translations.ngettext
- self._install_callables(gettext, ngettext, newstyle)
-
- def _install_null(self, newstyle=None):
- self._install_callables(
+ if ngettext is None:
+ ngettext = translations.ngettext
+ self._install_callables(gettext, ngettext, newstyle)
+
+ def _install_null(self, newstyle=None):
+ self._install_callables(
lambda x: x, lambda s, p, n: (n != 1 and (p,) or (s,))[0], newstyle
- )
-
- def _install_callables(self, gettext, ngettext, newstyle=None):
- if newstyle is not None:
- self.environment.newstyle_gettext = newstyle
- if self.environment.newstyle_gettext:
- gettext = _make_new_gettext(gettext)
- ngettext = _make_new_ngettext(ngettext)
+ )
+
+ def _install_callables(self, gettext, ngettext, newstyle=None):
+ if newstyle is not None:
+ self.environment.newstyle_gettext = newstyle
+ if self.environment.newstyle_gettext:
+ gettext = _make_new_gettext(gettext)
+ ngettext = _make_new_ngettext(ngettext)
self.environment.globals.update(gettext=gettext, ngettext=ngettext)
-
- def _uninstall(self, translations):
+
+ def _uninstall(self, translations):
for key in "gettext", "ngettext":
- self.environment.globals.pop(key, None)
-
- def _extract(self, source, gettext_functions=GETTEXT_FUNCTIONS):
- if isinstance(source, string_types):
- source = self.environment.parse(source)
- return extract_from_ast(source, gettext_functions)
-
- def parse(self, parser):
- """Parse a translatable tag."""
- lineno = next(parser.stream).lineno
- num_called_num = False
-
- # find all the variables referenced. Additionally a variable can be
- # defined in the body of the trans block too, but this is checked at
- # a later state.
- plural_expr = None
- plural_expr_assignment = None
- variables = {}
- trimmed = None
+ self.environment.globals.pop(key, None)
+
+ def _extract(self, source, gettext_functions=GETTEXT_FUNCTIONS):
+ if isinstance(source, string_types):
+ source = self.environment.parse(source)
+ return extract_from_ast(source, gettext_functions)
+
+ def parse(self, parser):
+ """Parse a translatable tag."""
+ lineno = next(parser.stream).lineno
+ num_called_num = False
+
+ # find all the variables referenced. Additionally a variable can be
+ # defined in the body of the trans block too, but this is checked at
+ # a later state.
+ plural_expr = None
+ plural_expr_assignment = None
+ variables = {}
+ trimmed = None
while parser.stream.current.type != "block_end":
- if variables:
+ if variables:
parser.stream.expect("comma")
-
- # skip colon for python compatibility
+
+ # skip colon for python compatibility
if parser.stream.skip_if("colon"):
- break
-
+ break
+
name = parser.stream.expect("name")
- if name.value in variables:
+ if name.value in variables:
parser.fail(
"translatable variable %r defined twice." % name.value,
name.lineno,
exc=TemplateAssertionError,
)
-
- # expressions
+
+ # expressions
if parser.stream.current.type == "assign":
- next(parser.stream)
- variables[name.value] = var = parser.parse_expression()
+ next(parser.stream)
+ variables[name.value] = var = parser.parse_expression()
elif trimmed is None and name.value in ("trimmed", "notrimmed"):
trimmed = name.value == "trimmed"
- continue
- else:
+ continue
+ else:
variables[name.value] = var = nodes.Name(name.value, "load")
-
- if plural_expr is None:
- if isinstance(var, nodes.Call):
+
+ if plural_expr is None:
+ if isinstance(var, nodes.Call):
plural_expr = nodes.Name("_trans", "load")
- variables[name.value] = plural_expr
- plural_expr_assignment = nodes.Assign(
+ variables[name.value] = plural_expr
+ plural_expr_assignment = nodes.Assign(
nodes.Name("_trans", "store"), var
)
- else:
- plural_expr = var
+ else:
+ plural_expr = var
num_called_num = name.value == "num"
-
+
parser.stream.expect("block_end")
-
- plural = None
- have_plural = False
- referenced = set()
-
- # now parse until endtrans or pluralize
- singular_names, singular = self._parse_block(parser, True)
- if singular_names:
- referenced.update(singular_names)
- if plural_expr is None:
+
+ plural = None
+ have_plural = False
+ referenced = set()
+
+ # now parse until endtrans or pluralize
+ singular_names, singular = self._parse_block(parser, True)
+ if singular_names:
+ referenced.update(singular_names)
+ if plural_expr is None:
plural_expr = nodes.Name(singular_names[0], "load")
num_called_num = singular_names[0] == "num"
-
- # if we have a pluralize block, we parse that too
+
+ # if we have a pluralize block, we parse that too
if parser.stream.current.test("name:pluralize"):
- have_plural = True
- next(parser.stream)
+ have_plural = True
+ next(parser.stream)
if parser.stream.current.type != "block_end":
name = parser.stream.expect("name")
- if name.value not in variables:
+ if name.value not in variables:
parser.fail(
"unknown variable %r for pluralization" % name.value,
name.lineno,
exc=TemplateAssertionError,
)
- plural_expr = variables[name.value]
+ plural_expr = variables[name.value]
num_called_num = name.value == "num"
parser.stream.expect("block_end")
- plural_names, plural = self._parse_block(parser, False)
- next(parser.stream)
- referenced.update(plural_names)
- else:
- next(parser.stream)
-
- # register free names as simple name expressions
- for var in referenced:
- if var not in variables:
+ plural_names, plural = self._parse_block(parser, False)
+ next(parser.stream)
+ referenced.update(plural_names)
+ else:
+ next(parser.stream)
+
+ # register free names as simple name expressions
+ for var in referenced:
+ if var not in variables:
variables[var] = nodes.Name(var, "load")
-
- if not have_plural:
- plural_expr = None
- elif plural_expr is None:
+
+ if not have_plural:
+ plural_expr = None
+ elif plural_expr is None:
parser.fail("pluralize without variables", lineno)
-
- if trimmed is None:
+
+ if trimmed is None:
trimmed = self.environment.policies["ext.i18n.trimmed"]
- if trimmed:
- singular = self._trim_whitespace(singular)
- if plural:
- plural = self._trim_whitespace(plural)
-
+ if trimmed:
+ singular = self._trim_whitespace(singular)
+ if plural:
+ plural = self._trim_whitespace(plural)
+
node = self._make_node(
singular,
plural,
@@ -335,67 +335,67 @@ class InternationalizationExtension(Extension):
bool(referenced),
num_called_num and have_plural,
)
- node.set_lineno(lineno)
- if plural_expr_assignment is not None:
- return [plural_expr_assignment, node]
- else:
- return node
-
+ node.set_lineno(lineno)
+ if plural_expr_assignment is not None:
+ return [plural_expr_assignment, node]
+ else:
+ return node
+
def _trim_whitespace(self, string, _ws_re=_ws_re):
return _ws_re.sub(" ", string.strip())
-
- def _parse_block(self, parser, allow_pluralize):
- """Parse until the next block tag with a given name."""
- referenced = []
- buf = []
- while 1:
+
+ def _parse_block(self, parser, allow_pluralize):
+ """Parse until the next block tag with a given name."""
+ referenced = []
+ buf = []
+ while 1:
if parser.stream.current.type == "data":
buf.append(parser.stream.current.value.replace("%", "%%"))
- next(parser.stream)
+ next(parser.stream)
elif parser.stream.current.type == "variable_begin":
- next(parser.stream)
+ next(parser.stream)
name = parser.stream.expect("name").value
- referenced.append(name)
+ referenced.append(name)
buf.append("%%(%s)s" % name)
parser.stream.expect("variable_end")
elif parser.stream.current.type == "block_begin":
- next(parser.stream)
+ next(parser.stream)
if parser.stream.current.test("name:endtrans"):
- break
+ break
elif parser.stream.current.test("name:pluralize"):
- if allow_pluralize:
- break
+ if allow_pluralize:
+ break
parser.fail(
"a translatable section can have only one pluralize section"
)
parser.fail(
"control structures in translatable sections are not allowed"
)
- elif parser.stream.eos:
+ elif parser.stream.eos:
parser.fail("unclosed translation block")
- else:
+ else:
raise RuntimeError("internal parser error")
-
- return referenced, concat(buf)
-
+
+ return referenced, concat(buf)
+
def _make_node(
self, singular, plural, variables, plural_expr, vars_referenced, num_called_num
):
- """Generates a useful node from the data provided."""
- # no variables referenced? no need to escape for old style
- # gettext invocations only if there are vars.
- if not vars_referenced and not self.environment.newstyle_gettext:
+ """Generates a useful node from the data provided."""
+ # no variables referenced? no need to escape for old style
+ # gettext invocations only if there are vars.
+ if not vars_referenced and not self.environment.newstyle_gettext:
singular = singular.replace("%%", "%")
- if plural:
+ if plural:
plural = plural.replace("%%", "%")
-
- # singular only:
- if plural_expr is None:
+
+ # singular only:
+ if plural_expr is None:
gettext = nodes.Name("gettext", "load")
node = nodes.Call(gettext, [nodes.Const(singular)], [], None, None)
-
- # singular and plural
- else:
+
+ # singular and plural
+ else:
ngettext = nodes.Name("ngettext", "load")
node = nodes.Call(
ngettext,
@@ -404,24 +404,24 @@ class InternationalizationExtension(Extension):
None,
None,
)
-
- # in case newstyle gettext is used, the method is powerful
- # enough to handle the variable expansion and autoescape
- # handling itself
- if self.environment.newstyle_gettext:
- for key, value in iteritems(variables):
- # the function adds that later anyways in case num was
- # called num, so just skip it.
+
+ # in case newstyle gettext is used, the method is powerful
+ # enough to handle the variable expansion and autoescape
+ # handling itself
+ if self.environment.newstyle_gettext:
+ for key, value in iteritems(variables):
+ # the function adds that later anyways in case num was
+ # called num, so just skip it.
if num_called_num and key == "num":
- continue
- node.kwargs.append(nodes.Keyword(key, value))
-
- # otherwise do that here
- else:
- # mark the return value as safe if we are in an
- # environment with autoescaping turned on
- node = nodes.MarkSafeIfAutoescape(node)
- if variables:
+ continue
+ node.kwargs.append(nodes.Keyword(key, value))
+
+ # otherwise do that here
+ else:
+ # mark the return value as safe if we are in an
+ # environment with autoescaping turned on
+ node = nodes.MarkSafeIfAutoescape(node)
+ if variables:
node = nodes.Mod(
node,
nodes.Dict(
@@ -431,42 +431,42 @@ class InternationalizationExtension(Extension):
]
),
)
- return nodes.Output([node])
-
-
-class ExprStmtExtension(Extension):
+ return nodes.Output([node])
+
+
+class ExprStmtExtension(Extension):
"""Adds a `do` tag to Jinja that works like the print statement just
- that it doesn't print the return value.
- """
-
+ that it doesn't print the return value.
+ """
+
tags = set(["do"])
- def parse(self, parser):
- node = nodes.ExprStmt(lineno=next(parser.stream).lineno)
- node.node = parser.parse_tuple()
- return node
-
-
-class LoopControlExtension(Extension):
- """Adds break and continue to the template engine."""
-
+ def parse(self, parser):
+ node = nodes.ExprStmt(lineno=next(parser.stream).lineno)
+ node.node = parser.parse_tuple()
+ return node
+
+
+class LoopControlExtension(Extension):
+ """Adds break and continue to the template engine."""
+
tags = set(["break", "continue"])
- def parse(self, parser):
- token = next(parser.stream)
+ def parse(self, parser):
+ token = next(parser.stream)
if token.value == "break":
- return nodes.Break(lineno=token.lineno)
- return nodes.Continue(lineno=token.lineno)
-
-
-class WithExtension(Extension):
- pass
-
-
-class AutoEscapeExtension(Extension):
- pass
-
-
+ return nodes.Break(lineno=token.lineno)
+ return nodes.Continue(lineno=token.lineno)
+
+
+class WithExtension(Extension):
+ pass
+
+
+class AutoEscapeExtension(Extension):
+ pass
+
+
class DebugExtension(Extension):
"""A ``{% debug %}`` tag that dumps the available variables,
filters, and tests.
@@ -511,153 +511,153 @@ class DebugExtension(Extension):
def extract_from_ast(node, gettext_functions=GETTEXT_FUNCTIONS, babel_style=True):
- """Extract localizable strings from the given template node. Per
- default this function returns matches in babel style that means non string
- parameters as well as keyword arguments are returned as `None`. This
- allows Babel to figure out what you really meant if you are using
- gettext functions that allow keyword arguments for placeholder expansion.
- If you don't want that behavior set the `babel_style` parameter to `False`
- which causes only strings to be returned and parameters are always stored
- in tuples. As a consequence invalid gettext calls (calls without a single
- string parameter or string parameters after non-string parameters) are
- skipped.
-
- This example explains the behavior:
-
- >>> from jinja2 import Environment
- >>> env = Environment()
- >>> node = env.parse('{{ (_("foo"), _(), ngettext("foo", "bar", 42)) }}')
- >>> list(extract_from_ast(node))
- [(1, '_', 'foo'), (1, '_', ()), (1, 'ngettext', ('foo', 'bar', None))]
- >>> list(extract_from_ast(node, babel_style=False))
- [(1, '_', ('foo',)), (1, 'ngettext', ('foo', 'bar'))]
-
- For every string found this function yields a ``(lineno, function,
- message)`` tuple, where:
-
- * ``lineno`` is the number of the line on which the string was found,
- * ``function`` is the name of the ``gettext`` function used (if the
- string was extracted from embedded Python code), and
- * ``message`` is the string itself (a ``unicode`` object, or a tuple
- of ``unicode`` objects for functions with multiple string arguments).
-
- This extraction function operates on the AST and is because of that unable
- to extract any comments. For comment support you have to use the babel
- extraction interface or extract comments yourself.
- """
- for node in node.find_all(nodes.Call):
+ """Extract localizable strings from the given template node. Per
+ default this function returns matches in babel style that means non string
+ parameters as well as keyword arguments are returned as `None`. This
+ allows Babel to figure out what you really meant if you are using
+ gettext functions that allow keyword arguments for placeholder expansion.
+ If you don't want that behavior set the `babel_style` parameter to `False`
+ which causes only strings to be returned and parameters are always stored
+ in tuples. As a consequence invalid gettext calls (calls without a single
+ string parameter or string parameters after non-string parameters) are
+ skipped.
+
+ This example explains the behavior:
+
+ >>> from jinja2 import Environment
+ >>> env = Environment()
+ >>> node = env.parse('{{ (_("foo"), _(), ngettext("foo", "bar", 42)) }}')
+ >>> list(extract_from_ast(node))
+ [(1, '_', 'foo'), (1, '_', ()), (1, 'ngettext', ('foo', 'bar', None))]
+ >>> list(extract_from_ast(node, babel_style=False))
+ [(1, '_', ('foo',)), (1, 'ngettext', ('foo', 'bar'))]
+
+ For every string found this function yields a ``(lineno, function,
+ message)`` tuple, where:
+
+ * ``lineno`` is the number of the line on which the string was found,
+ * ``function`` is the name of the ``gettext`` function used (if the
+ string was extracted from embedded Python code), and
+ * ``message`` is the string itself (a ``unicode`` object, or a tuple
+ of ``unicode`` objects for functions with multiple string arguments).
+
+ This extraction function operates on the AST and is because of that unable
+ to extract any comments. For comment support you have to use the babel
+ extraction interface or extract comments yourself.
+ """
+ for node in node.find_all(nodes.Call):
if (
not isinstance(node.node, nodes.Name)
or node.node.name not in gettext_functions
):
- continue
-
- strings = []
- for arg in node.args:
+ continue
+
+ strings = []
+ for arg in node.args:
if isinstance(arg, nodes.Const) and isinstance(arg.value, string_types):
- strings.append(arg.value)
- else:
- strings.append(None)
-
+ strings.append(arg.value)
+ else:
+ strings.append(None)
+
for _ in node.kwargs:
- strings.append(None)
- if node.dyn_args is not None:
- strings.append(None)
- if node.dyn_kwargs is not None:
- strings.append(None)
-
- if not babel_style:
- strings = tuple(x for x in strings if x is not None)
- if not strings:
- continue
- else:
- if len(strings) == 1:
- strings = strings[0]
- else:
- strings = tuple(strings)
- yield node.lineno, node.node.name, strings
-
-
-class _CommentFinder(object):
- """Helper class to find comments in a token stream. Can only
- find comments for gettext calls forwards. Once the comment
- from line 4 is found, a comment for line 1 will not return a
- usable value.
- """
-
- def __init__(self, tokens, comment_tags):
- self.tokens = tokens
- self.comment_tags = comment_tags
- self.offset = 0
- self.last_lineno = 0
-
- def find_backwards(self, offset):
- try:
+ strings.append(None)
+ if node.dyn_args is not None:
+ strings.append(None)
+ if node.dyn_kwargs is not None:
+ strings.append(None)
+
+ if not babel_style:
+ strings = tuple(x for x in strings if x is not None)
+ if not strings:
+ continue
+ else:
+ if len(strings) == 1:
+ strings = strings[0]
+ else:
+ strings = tuple(strings)
+ yield node.lineno, node.node.name, strings
+
+
+class _CommentFinder(object):
+ """Helper class to find comments in a token stream. Can only
+ find comments for gettext calls forwards. Once the comment
+ from line 4 is found, a comment for line 1 will not return a
+ usable value.
+ """
+
+ def __init__(self, tokens, comment_tags):
+ self.tokens = tokens
+ self.comment_tags = comment_tags
+ self.offset = 0
+ self.last_lineno = 0
+
+ def find_backwards(self, offset):
+ try:
for _, token_type, token_value in reversed(
self.tokens[self.offset : offset]
):
if token_type in ("comment", "linecomment"):
- try:
- prefix, comment = token_value.split(None, 1)
- except ValueError:
- continue
- if prefix in self.comment_tags:
- return [comment.rstrip()]
- return []
- finally:
- self.offset = offset
-
- def find_comments(self, lineno):
- if not self.comment_tags or self.last_lineno > lineno:
- return []
+ try:
+ prefix, comment = token_value.split(None, 1)
+ except ValueError:
+ continue
+ if prefix in self.comment_tags:
+ return [comment.rstrip()]
+ return []
+ finally:
+ self.offset = offset
+
+ def find_comments(self, lineno):
+ if not self.comment_tags or self.last_lineno > lineno:
+ return []
for idx, (token_lineno, _, _) in enumerate(self.tokens[self.offset :]):
- if token_lineno > lineno:
- return self.find_backwards(self.offset + idx)
- return self.find_backwards(len(self.tokens))
-
-
-def babel_extract(fileobj, keywords, comment_tags, options):
- """Babel extraction method for Jinja templates.
-
- .. versionchanged:: 2.3
- Basic support for translation comments was added. If `comment_tags`
- is now set to a list of keywords for extraction, the extractor will
+ if token_lineno > lineno:
+ return self.find_backwards(self.offset + idx)
+ return self.find_backwards(len(self.tokens))
+
+
+def babel_extract(fileobj, keywords, comment_tags, options):
+ """Babel extraction method for Jinja templates.
+
+ .. versionchanged:: 2.3
+ Basic support for translation comments was added. If `comment_tags`
+ is now set to a list of keywords for extraction, the extractor will
try to find the best preceding comment that begins with one of the
- keywords. For best results, make sure to not have more than one
- gettext call in one line of code and the matching comment in the
- same line or the line before.
-
- .. versionchanged:: 2.5.1
- The `newstyle_gettext` flag can be set to `True` to enable newstyle
- gettext calls.
-
- .. versionchanged:: 2.7
- A `silent` option can now be provided. If set to `False` template
- syntax errors are propagated instead of being ignored.
-
- :param fileobj: the file-like object the messages should be extracted from
- :param keywords: a list of keywords (i.e. function names) that should be
- recognized as translation functions
- :param comment_tags: a list of translator tags to search for and include
- in the results.
- :param options: a dictionary of additional options (optional)
- :return: an iterator over ``(lineno, funcname, message, comments)`` tuples.
- (comments will be empty currently)
- """
- extensions = set()
+ keywords. For best results, make sure to not have more than one
+ gettext call in one line of code and the matching comment in the
+ same line or the line before.
+
+ .. versionchanged:: 2.5.1
+ The `newstyle_gettext` flag can be set to `True` to enable newstyle
+ gettext calls.
+
+ .. versionchanged:: 2.7
+ A `silent` option can now be provided. If set to `False` template
+ syntax errors are propagated instead of being ignored.
+
+ :param fileobj: the file-like object the messages should be extracted from
+ :param keywords: a list of keywords (i.e. function names) that should be
+ recognized as translation functions
+ :param comment_tags: a list of translator tags to search for and include
+ in the results.
+ :param options: a dictionary of additional options (optional)
+ :return: an iterator over ``(lineno, funcname, message, comments)`` tuples.
+ (comments will be empty currently)
+ """
+ extensions = set()
for extension in options.get("extensions", "").split(","):
- extension = extension.strip()
- if not extension:
- continue
- extensions.add(import_string(extension))
- if InternationalizationExtension not in extensions:
- extensions.add(InternationalizationExtension)
-
- def getbool(options, key, default=False):
+ extension = extension.strip()
+ if not extension:
+ continue
+ extensions.add(import_string(extension))
+ if InternationalizationExtension not in extensions:
+ extensions.add(InternationalizationExtension)
+
+ def getbool(options, key, default=False):
return options.get(key, str(default)).lower() in ("1", "on", "yes", "true")
-
+
silent = getbool(options, "silent", True)
- environment = Environment(
+ environment = Environment(
options.get("block_start_string", BLOCK_START_STRING),
options.get("block_end_string", BLOCK_END_STRING),
options.get("variable_start_string", VARIABLE_START_STRING),
@@ -668,37 +668,37 @@ def babel_extract(fileobj, keywords, comment_tags, options):
options.get("line_comment_prefix") or LINE_COMMENT_PREFIX,
getbool(options, "trim_blocks", TRIM_BLOCKS),
getbool(options, "lstrip_blocks", LSTRIP_BLOCKS),
- NEWLINE_SEQUENCE,
+ NEWLINE_SEQUENCE,
getbool(options, "keep_trailing_newline", KEEP_TRAILING_NEWLINE),
- frozenset(extensions),
- cache_size=0,
+ frozenset(extensions),
+ cache_size=0,
auto_reload=False,
- )
-
+ )
+
if getbool(options, "trimmed"):
environment.policies["ext.i18n.trimmed"] = True
if getbool(options, "newstyle_gettext"):
- environment.newstyle_gettext = True
-
+ environment.newstyle_gettext = True
+
source = fileobj.read().decode(options.get("encoding", "utf-8"))
- try:
- node = environment.parse(source)
- tokens = list(environment.lex(environment.preprocess(source)))
+ try:
+ node = environment.parse(source)
+ tokens = list(environment.lex(environment.preprocess(source)))
except TemplateSyntaxError:
- if not silent:
- raise
- # skip templates with syntax errors
- return
-
- finder = _CommentFinder(tokens, comment_tags)
- for lineno, func, message in extract_from_ast(node, keywords):
- yield lineno, func, message, finder.find_comments(lineno)
-
-
-#: nicer import names
-i18n = InternationalizationExtension
-do = ExprStmtExtension
-loopcontrols = LoopControlExtension
-with_ = WithExtension
-autoescape = AutoEscapeExtension
+ if not silent:
+ raise
+ # skip templates with syntax errors
+ return
+
+ finder = _CommentFinder(tokens, comment_tags)
+ for lineno, func, message in extract_from_ast(node, keywords):
+ yield lineno, func, message, finder.find_comments(lineno)
+
+
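
# --- usage sketch (assumes a Jinja 2.11-era install; the template text, keywords
# and comment tag below are illustrative, not part of the diff) ---
# Drive babel_extract directly with an in-memory "file"; Babel normally calls it
# through its extraction entry point. Output shown in comments is approximate.
import io

from jinja2.ext import babel_extract

src = io.BytesIO(b'{{ _("Hello") }}\n{# NOTE: greeting #}\n{{ gettext("Bye") }}')
for lineno, func, message, comments in babel_extract(
    src, ("_", "gettext", "ngettext"), ("NOTE:",), {}
):
    print(lineno, func, message, comments)
    # roughly: 1 _ Hello []   then   3 gettext Bye ['greeting']
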
+#: nicer import names
+i18n = InternationalizationExtension
+do = ExprStmtExtension
+loopcontrols = LoopControlExtension
+with_ = WithExtension
+autoescape = AutoEscapeExtension
debug = DebugExtension
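
# --- usage sketch (assumes a Jinja 2.11-era install) ---
# The aliases above are what the "jinja2.ext.<name>" import strings resolve to
# when configuring an Environment; loopcontrols enables {% break %}/{% continue %}.
from jinja2 import Environment

env = Environment(extensions=["jinja2.ext.do", "jinja2.ext.loopcontrols"])
src = "{% for x in [1, 2, 3] %}{% if x == 2 %}{% break %}{% endif %}{{ x }}{% endfor %}"
print(env.from_string(src).render())  # -> "1"
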
diff --git a/contrib/python/Jinja2/py2/jinja2/filters.py b/contrib/python/Jinja2/py2/jinja2/filters.py
index 74b108dcec..f7c35ab7f2 100644
--- a/contrib/python/Jinja2/py2/jinja2/filters.py
+++ b/contrib/python/Jinja2/py2/jinja2/filters.py
@@ -1,17 +1,17 @@
-# -*- coding: utf-8 -*-
+# -*- coding: utf-8 -*-
"""Built-in template filters used with the ``|`` operator."""
-import math
-import random
+import math
+import random
import re
-import warnings
-from collections import namedtuple
+import warnings
+from collections import namedtuple
from itertools import chain
from itertools import groupby
-
+
from markupsafe import escape
from markupsafe import Markup
from markupsafe import soft_unicode
-
+
from ._compat import abc
from ._compat import imap
from ._compat import iteritems
@@ -23,67 +23,67 @@ from .utils import htmlsafe_json_dumps
from .utils import pformat
from .utils import unicode_urlencode
from .utils import urlize
-
+
_word_re = re.compile(r"\w+", re.UNICODE)
_word_beginning_split_re = re.compile(r"([-\s\(\{\[\<]+)", re.UNICODE)
-
-
-def contextfilter(f):
- """Decorator for marking context dependent filters. The current
- :class:`Context` will be passed as first argument.
- """
- f.contextfilter = True
- return f
-
-
-def evalcontextfilter(f):
- """Decorator for marking eval-context dependent filters. An eval
- context object is passed as first argument. For more information
- about the eval context, see :ref:`eval-context`.
-
- .. versionadded:: 2.4
- """
- f.evalcontextfilter = True
- return f
-
-
-def environmentfilter(f):
- """Decorator for marking environment dependent filters. The current
- :class:`Environment` is passed to the filter as first argument.
- """
- f.environmentfilter = True
- return f
-
-
-def ignore_case(value):
- """For use as a postprocessor for :func:`make_attrgetter`. Converts strings
- to lowercase and returns other types as-is."""
- return value.lower() if isinstance(value, string_types) else value
-
-
+
+
+def contextfilter(f):
+ """Decorator for marking context dependent filters. The current
+ :class:`Context` will be passed as first argument.
+ """
+ f.contextfilter = True
+ return f
+
+
+def evalcontextfilter(f):
+ """Decorator for marking eval-context dependent filters. An eval
+ context object is passed as first argument. For more information
+ about the eval context, see :ref:`eval-context`.
+
+ .. versionadded:: 2.4
+ """
+ f.evalcontextfilter = True
+ return f
+
+
+def environmentfilter(f):
+ """Decorator for marking environment dependent filters. The current
+ :class:`Environment` is passed to the filter as first argument.
+ """
+ f.environmentfilter = True
+ return f
+
+
+def ignore_case(value):
+ """For use as a postprocessor for :func:`make_attrgetter`. Converts strings
+ to lowercase and returns other types as-is."""
+ return value.lower() if isinstance(value, string_types) else value
+
+
def make_attrgetter(environment, attribute, postprocess=None, default=None):
- """Returns a callable that looks up the given attribute from a
- passed object with the rules of the environment. Dots are allowed
- to access attributes of attributes. Integer parts in paths are
- looked up as integers.
- """
+ """Returns a callable that looks up the given attribute from a
+ passed object with the rules of the environment. Dots are allowed
+ to access attributes of attributes. Integer parts in paths are
+ looked up as integers.
+ """
attribute = _prepare_attribute_parts(attribute)
-
- def attrgetter(item):
- for part in attribute:
- item = environment.getitem(item, part)
-
+
+ def attrgetter(item):
+ for part in attribute:
+ item = environment.getitem(item, part)
+
if default and isinstance(item, Undefined):
item = default
- if postprocess is not None:
- item = postprocess(item)
-
- return item
-
- return attrgetter
-
-
+ if postprocess is not None:
+ item = postprocess(item)
+
+ return item
+
+ return attrgetter
+
+
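
# --- usage sketch (assumes a Jinja 2.11-era install; sample data is illustrative) ---
# make_attrgetter resolves dotted paths through environment.getitem, so dict keys,
# attributes and integer indexes all work.
from jinja2 import Environment
from jinja2.filters import make_attrgetter

env = Environment()
get_city = make_attrgetter(env, "address.city")
print(get_city({"address": {"city": "Vienna"}}))  # -> Vienna
get_second = make_attrgetter(env, "items.1")
print(get_second({"items": ["a", "b"]}))          # -> b
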
def make_multi_attrgetter(environment, attribute, postprocess=None):
"""Returns a callable that looks up the given comma separated
attributes from a passed object with the rules of the environment.
@@ -127,16 +127,16 @@ def _prepare_attribute_parts(attr):
return [attr]
-def do_forceescape(value):
- """Enforce HTML escaping. This will probably double escape variables."""
+def do_forceescape(value):
+ """Enforce HTML escaping. This will probably double escape variables."""
if hasattr(value, "__html__"):
- value = value.__html__()
- return escape(text_type(value))
-
-
-def do_urlencode(value):
+ value = value.__html__()
+ return escape(text_type(value))
+
+
+def do_urlencode(value):
"""Quote data for use in a URL path or query using UTF-8.
-
+
Basic wrapper around :func:`urllib.parse.quote` when given a
string, or :func:`urllib.parse.urlencode` for a dict or iterable.
@@ -148,110 +148,110 @@ def do_urlencode(value):
"%2F" equivalently in paths. If you need quoted slashes, use the
``|replace("/", "%2F")`` filter.
- .. versionadded:: 2.7
- """
+ .. versionadded:: 2.7
+ """
if isinstance(value, string_types) or not isinstance(value, abc.Iterable):
- return unicode_urlencode(value)
-
+ return unicode_urlencode(value)
+
if isinstance(value, dict):
items = iteritems(value)
else:
items = iter(value)
-
+
return u"&".join(
"%s=%s" % (unicode_urlencode(k, for_qs=True), unicode_urlencode(v, for_qs=True))
for k, v in items
)
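
# --- usage sketch (assumes a Jinja 2.11-era install) ---
# |urlencode quotes a plain string, or builds a query string from a dict/iterable.
from jinja2 import Environment

env = Environment()
print(env.from_string("{{ 'path with spaces'|urlencode }}").render())
# -> path%20with%20spaces
print(env.from_string("{{ {'q': 'a b', 'lang': 'de'}|urlencode }}").render())
# -> q=a%20b&lang=de  (dict order is preserved on Python 3.7+)
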
-@evalcontextfilter
-def do_replace(eval_ctx, s, old, new, count=None):
- """Return a copy of the value with all occurrences of a substring
- replaced with a new one. The first argument is the substring
- that should be replaced, the second is the replacement string.
- If the optional third argument ``count`` is given, only the first
- ``count`` occurrences are replaced:
-
- .. sourcecode:: jinja
-
- {{ "Hello World"|replace("Hello", "Goodbye") }}
- -> Goodbye World
-
- {{ "aaaaargh"|replace("a", "d'oh, ", 2) }}
- -> d'oh, d'oh, aaargh
- """
- if count is None:
- count = -1
- if not eval_ctx.autoescape:
- return text_type(s).replace(text_type(old), text_type(new), count)
+@evalcontextfilter
+def do_replace(eval_ctx, s, old, new, count=None):
+ """Return a copy of the value with all occurrences of a substring
+ replaced with a new one. The first argument is the substring
+ that should be replaced, the second is the replacement string.
+ If the optional third argument ``count`` is given, only the first
+ ``count`` occurrences are replaced:
+
+ .. sourcecode:: jinja
+
+ {{ "Hello World"|replace("Hello", "Goodbye") }}
+ -> Goodbye World
+
+ {{ "aaaaargh"|replace("a", "d'oh, ", 2) }}
+ -> d'oh, d'oh, aaargh
+ """
+ if count is None:
+ count = -1
+ if not eval_ctx.autoescape:
+ return text_type(s).replace(text_type(old), text_type(new), count)
if (
hasattr(old, "__html__")
or hasattr(new, "__html__")
and not hasattr(s, "__html__")
):
- s = escape(s)
- else:
- s = soft_unicode(s)
- return s.replace(soft_unicode(old), soft_unicode(new), count)
-
-
-def do_upper(s):
- """Convert a value to uppercase."""
- return soft_unicode(s).upper()
-
-
-def do_lower(s):
- """Convert a value to lowercase."""
- return soft_unicode(s).lower()
-
-
-@evalcontextfilter
-def do_xmlattr(_eval_ctx, d, autospace=True):
- """Create an SGML/XML attribute string based on the items in a dict.
- All values that are neither `none` nor `undefined` are automatically
- escaped:
-
- .. sourcecode:: html+jinja
-
- <ul{{ {'class': 'my_list', 'missing': none,
- 'id': 'list-%d'|format(variable)}|xmlattr }}>
- ...
- </ul>
-
- Results in something like this:
-
- .. sourcecode:: html
-
- <ul class="my_list" id="list-42">
- ...
- </ul>
-
- As you can see it automatically prepends a space in front of the item
- if the filter returned something unless the second parameter is false.
- """
+ s = escape(s)
+ else:
+ s = soft_unicode(s)
+ return s.replace(soft_unicode(old), soft_unicode(new), count)
+
+
+def do_upper(s):
+ """Convert a value to uppercase."""
+ return soft_unicode(s).upper()
+
+
+def do_lower(s):
+ """Convert a value to lowercase."""
+ return soft_unicode(s).lower()
+
+
+@evalcontextfilter
+def do_xmlattr(_eval_ctx, d, autospace=True):
+ """Create an SGML/XML attribute string based on the items in a dict.
+ All values that are neither `none` nor `undefined` are automatically
+ escaped:
+
+ .. sourcecode:: html+jinja
+
+ <ul{{ {'class': 'my_list', 'missing': none,
+ 'id': 'list-%d'|format(variable)}|xmlattr }}>
+ ...
+ </ul>
+
+ Results in something like this:
+
+ .. sourcecode:: html
+
+ <ul class="my_list" id="list-42">
+ ...
+ </ul>
+
+ As you can see it automatically prepends a space in front of the item
+ if the filter returned something unless the second parameter is false.
+ """
rv = u" ".join(
- u'%s="%s"' % (escape(key), escape(value))
- for key, value in iteritems(d)
- if value is not None and not isinstance(value, Undefined)
- )
- if autospace and rv:
+ u'%s="%s"' % (escape(key), escape(value))
+ for key, value in iteritems(d)
+ if value is not None and not isinstance(value, Undefined)
+ )
+ if autospace and rv:
rv = u" " + rv
- if _eval_ctx.autoescape:
- rv = Markup(rv)
- return rv
-
-
-def do_capitalize(s):
- """Capitalize a value. The first character will be uppercase, all others
- lowercase.
- """
- return soft_unicode(s).capitalize()
-
-
-def do_title(s):
- """Return a titlecased version of the value. I.e. words will start with
- uppercase letters, all remaining characters are lowercase.
- """
+ if _eval_ctx.autoescape:
+ rv = Markup(rv)
+ return rv
+
+
+def do_capitalize(s):
+ """Capitalize a value. The first character will be uppercase, all others
+ lowercase.
+ """
+ return soft_unicode(s).capitalize()
+
+
+def do_title(s):
+ """Return a titlecased version of the value. I.e. words will start with
+ uppercase letters, all remaining characters are lowercase.
+ """
return "".join(
[
item[0].upper() + item[1:].lower()
@@ -259,51 +259,51 @@ def do_title(s):
if item
]
)
-
-
+
+
def do_dictsort(value, case_sensitive=False, by="key", reverse=False):
- """Sort a dict and yield (key, value) pairs. Because python dicts are
- unsorted you may want to use this function to order them by either
- key or value:
-
- .. sourcecode:: jinja
-
+ """Sort a dict and yield (key, value) pairs. Because python dicts are
+ unsorted you may want to use this function to order them by either
+ key or value:
+
+ .. sourcecode:: jinja
+
{% for key, value in mydict|dictsort %}
- sort the dict by key, case insensitive
-
+ sort the dict by key, case insensitive
+
{% for key, value in mydict|dictsort(reverse=true) %}
- sort the dict by key, case insensitive, reverse order
-
+ sort the dict by key, case insensitive, reverse order
+
{% for key, value in mydict|dictsort(true) %}
- sort the dict by key, case sensitive
-
+ sort the dict by key, case sensitive
+
{% for key, value in mydict|dictsort(false, 'value') %}
- sort the dict by value, case insensitive
- """
+ sort the dict by value, case insensitive
+ """
if by == "key":
- pos = 0
+ pos = 0
elif by == "value":
- pos = 1
- else:
+ pos = 1
+ else:
raise FilterArgumentError('You can only sort by either "key" or "value"')
-
- def sort_func(item):
- value = item[pos]
-
- if not case_sensitive:
- value = ignore_case(value)
-
- return value
-
- return sorted(value.items(), key=sort_func, reverse=reverse)
-
-
-@environmentfilter
+
+ def sort_func(item):
+ value = item[pos]
+
+ if not case_sensitive:
+ value = ignore_case(value)
+
+ return value
+
+ return sorted(value.items(), key=sort_func, reverse=reverse)
+
+
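
# --- usage sketch (assumes a Jinja 2.11-era install) ---
# do_dictsort called directly: case-insensitive key sort by default, or by value.
from jinja2.filters import do_dictsort

print(do_dictsort({"b": 2, "A": 1}))                            # [('A', 1), ('b', 2)]
print(do_dictsort({"b": 2, "A": 1}, by="value", reverse=True))  # [('b', 2), ('A', 1)]
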
+@environmentfilter
def do_sort(environment, value, reverse=False, case_sensitive=False, attribute=None):
"""Sort an iterable using Python's :func:`sorted`.
-
+
.. sourcecode:: jinja
-
+
{% for city in cities|sort %}
...
{% endfor %}
@@ -319,201 +319,201 @@ def do_sort(environment, value, reverse=False, case_sensitive=False, attribute=N
        elements that compare equal. This makes it possible to chain
sorts on different attributes and ordering.
- .. sourcecode:: jinja
-
+ .. sourcecode:: jinja
+
{% for user in users|sort(attribute="name")
|sort(reverse=true, attribute="age") %}
- ...
- {% endfor %}
-
+ ...
+ {% endfor %}
+
As a shortcut to chaining when the direction is the same for all
attributes, pass a comma separate list of attributes.
-
- .. sourcecode:: jinja
-
+
+ .. sourcecode:: jinja
+
            {% for user in users|sort(attribute="age,name") %}
- ...
- {% endfor %}
-
+ ...
+ {% endfor %}
+
.. versionchanged:: 2.11.0
The ``attribute`` parameter can be a comma separated list of
attributes, e.g. ``"age,name"``.
- .. versionchanged:: 2.6
+ .. versionchanged:: 2.6
The ``attribute`` parameter was added.
- """
+ """
key_func = make_multi_attrgetter(
environment, attribute, postprocess=ignore_case if not case_sensitive else None
- )
- return sorted(value, key=key_func, reverse=reverse)
-
-
-@environmentfilter
-def do_unique(environment, value, case_sensitive=False, attribute=None):
+ )
+ return sorted(value, key=key_func, reverse=reverse)
+
+
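
# --- usage sketch (assumes a Jinja 2.11-era install; sample data is illustrative) ---
# The comma-separated attribute form sorts by age first, then by name.
from jinja2 import Environment

env = Environment()
tmpl = env.from_string(
    "{% for u in users|sort(attribute='age,name') %}{{ u.name }} {% endfor %}"
)
users = [{"name": "b", "age": 30}, {"name": "a", "age": 30}, {"name": "c", "age": 20}]
print(tmpl.render(users=users))  # -> "c a b "
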
+@environmentfilter
+def do_unique(environment, value, case_sensitive=False, attribute=None):
"""Returns a list of unique items from the given iterable.
-
- .. sourcecode:: jinja
-
+
+ .. sourcecode:: jinja
+
{{ ['foo', 'bar', 'foobar', 'FooBar']|unique|list }}
- -> ['foo', 'bar', 'foobar']
-
- The unique items are yielded in the same order as their first occurrence in
- the iterable passed to the filter.
-
- :param case_sensitive: Treat upper and lower case strings as distinct.
- :param attribute: Filter objects with unique values for this attribute.
- """
- getter = make_attrgetter(
+ -> ['foo', 'bar', 'foobar']
+
+ The unique items are yielded in the same order as their first occurrence in
+ the iterable passed to the filter.
+
+ :param case_sensitive: Treat upper and lower case strings as distinct.
+ :param attribute: Filter objects with unique values for this attribute.
+ """
+ getter = make_attrgetter(
environment, attribute, postprocess=ignore_case if not case_sensitive else None
- )
- seen = set()
-
- for item in value:
- key = getter(item)
-
- if key not in seen:
- seen.add(key)
- yield item
-
-
-def _min_or_max(environment, value, func, case_sensitive, attribute):
- it = iter(value)
-
- try:
- first = next(it)
- except StopIteration:
+ )
+ seen = set()
+
+ for item in value:
+ key = getter(item)
+
+ if key not in seen:
+ seen.add(key)
+ yield item
+
+
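
# --- usage sketch (assumes a Jinja 2.11-era install) ---
# |unique compares case-insensitively unless case_sensitive=true is passed.
from jinja2 import Environment

env = Environment()
print(env.from_string("{{ ['foo', 'Foo', 'bar']|unique|list }}").render())
# -> ['foo', 'bar']
print(env.from_string("{{ ['foo', 'Foo']|unique(case_sensitive=true)|list }}").render())
# -> ['foo', 'Foo']
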
+def _min_or_max(environment, value, func, case_sensitive, attribute):
+ it = iter(value)
+
+ try:
+ first = next(it)
+ except StopIteration:
return environment.undefined("No aggregated item, sequence was empty.")
-
- key_func = make_attrgetter(
+
+ key_func = make_attrgetter(
environment, attribute, postprocess=ignore_case if not case_sensitive else None
- )
- return func(chain([first], it), key=key_func)
-
-
-@environmentfilter
-def do_min(environment, value, case_sensitive=False, attribute=None):
- """Return the smallest item from the sequence.
-
- .. sourcecode:: jinja
-
- {{ [1, 2, 3]|min }}
- -> 1
-
- :param case_sensitive: Treat upper and lower case strings as distinct.
+ )
+ return func(chain([first], it), key=key_func)
+
+
+@environmentfilter
+def do_min(environment, value, case_sensitive=False, attribute=None):
+ """Return the smallest item from the sequence.
+
+ .. sourcecode:: jinja
+
+ {{ [1, 2, 3]|min }}
+ -> 1
+
+ :param case_sensitive: Treat upper and lower case strings as distinct.
:param attribute: Get the object with the min value of this attribute.
- """
- return _min_or_max(environment, value, min, case_sensitive, attribute)
-
-
-@environmentfilter
-def do_max(environment, value, case_sensitive=False, attribute=None):
- """Return the largest item from the sequence.
-
- .. sourcecode:: jinja
-
- {{ [1, 2, 3]|max }}
- -> 3
-
- :param case_sensitive: Treat upper and lower case strings as distinct.
- :param attribute: Get the object with the max value of this attribute.
- """
- return _min_or_max(environment, value, max, case_sensitive, attribute)
-
-
+ """
+ return _min_or_max(environment, value, min, case_sensitive, attribute)
+
+
+@environmentfilter
+def do_max(environment, value, case_sensitive=False, attribute=None):
+ """Return the largest item from the sequence.
+
+ .. sourcecode:: jinja
+
+ {{ [1, 2, 3]|max }}
+ -> 3
+
+ :param case_sensitive: Treat upper and lower case strings as distinct.
+ :param attribute: Get the object with the max value of this attribute.
+ """
+ return _min_or_max(environment, value, max, case_sensitive, attribute)
+
+
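
# --- usage sketch (assumes a Jinja 2.11-era install; sample data is illustrative) ---
# |min / |max return the whole object when an attribute is given.
from jinja2 import Environment

env = Environment()
items = [{"name": "a", "price": 3}, {"name": "b", "price": 1}]
print(env.from_string("{{ items|min(attribute='price') }}").render(items=items))
# -> the item with price 1
print(env.from_string("{{ [1, 2, 3]|max }}").render())  # -> 3
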
def do_default(value, default_value=u"", boolean=False):
- """If the value is undefined it will return the passed default value,
- otherwise the value of the variable:
-
- .. sourcecode:: jinja
-
- {{ my_variable|default('my_variable is not defined') }}
-
- This will output the value of ``my_variable`` if the variable was
- defined, otherwise ``'my_variable is not defined'``. If you want
- to use default with variables that evaluate to false you have to
- set the second parameter to `true`:
-
- .. sourcecode:: jinja
-
- {{ ''|default('the string was empty', true) }}
+ """If the value is undefined it will return the passed default value,
+ otherwise the value of the variable:
+
+ .. sourcecode:: jinja
+
+ {{ my_variable|default('my_variable is not defined') }}
+
+ This will output the value of ``my_variable`` if the variable was
+ defined, otherwise ``'my_variable is not defined'``. If you want
+ to use default with variables that evaluate to false you have to
+ set the second parameter to `true`:
+
+ .. sourcecode:: jinja
+
+ {{ ''|default('the string was empty', true) }}
.. versionchanged:: 2.11
It's now possible to configure the :class:`~jinja2.Environment` with
:class:`~jinja2.ChainableUndefined` to make the `default` filter work
on nested elements and attributes that may contain undefined values
in the chain without getting an :exc:`~jinja2.UndefinedError`.
- """
- if isinstance(value, Undefined) or (boolean and not value):
- return default_value
- return value
-
-
-@evalcontextfilter
+ """
+ if isinstance(value, Undefined) or (boolean and not value):
+ return default_value
+ return value
+
+
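
# --- usage sketch (assumes a Jinja 2.11-era install) ---
# The second (boolean) argument makes |default also replace falsy but defined values.
from jinja2 import Environment

env = Environment()
print(env.from_string("{{ missing|default('n/a') }}").render())      # -> n/a
print(env.from_string("{{ ''|default('empty', true) }}").render())   # -> empty
print(env.from_string("{{ ''|default('empty') }}").render())         # -> (empty output)
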
+@evalcontextfilter
def do_join(eval_ctx, value, d=u"", attribute=None):
- """Return a string which is the concatenation of the strings in the
- sequence. The separator between elements is an empty string per
- default, you can define it with the optional parameter:
-
- .. sourcecode:: jinja
-
- {{ [1, 2, 3]|join('|') }}
- -> 1|2|3
-
- {{ [1, 2, 3]|join }}
- -> 123
-
- It is also possible to join certain attributes of an object:
-
- .. sourcecode:: jinja
-
- {{ users|join(', ', attribute='username') }}
-
- .. versionadded:: 2.6
- The `attribute` parameter was added.
- """
- if attribute is not None:
- value = imap(make_attrgetter(eval_ctx.environment, attribute), value)
-
+ """Return a string which is the concatenation of the strings in the
+ sequence. The separator between elements is an empty string per
+ default, you can define it with the optional parameter:
+
+ .. sourcecode:: jinja
+
+ {{ [1, 2, 3]|join('|') }}
+ -> 1|2|3
+
+ {{ [1, 2, 3]|join }}
+ -> 123
+
+ It is also possible to join certain attributes of an object:
+
+ .. sourcecode:: jinja
+
+ {{ users|join(', ', attribute='username') }}
+
+ .. versionadded:: 2.6
+ The `attribute` parameter was added.
+ """
+ if attribute is not None:
+ value = imap(make_attrgetter(eval_ctx.environment, attribute), value)
+
# no automatic escaping? joining is a lot easier then
- if not eval_ctx.autoescape:
- return text_type(d).join(imap(text_type, value))
-
- # if the delimiter doesn't have an html representation we check
- # if any of the items has. If yes we do a coercion to Markup
+ if not eval_ctx.autoescape:
+ return text_type(d).join(imap(text_type, value))
+
+ # if the delimiter doesn't have an html representation we check
+ # if any of the items has. If yes we do a coercion to Markup
if not hasattr(d, "__html__"):
- value = list(value)
- do_escape = False
- for idx, item in enumerate(value):
+ value = list(value)
+ do_escape = False
+ for idx, item in enumerate(value):
if hasattr(item, "__html__"):
- do_escape = True
- else:
- value[idx] = text_type(item)
- if do_escape:
- d = escape(d)
- else:
- d = text_type(d)
- return d.join(value)
-
- # no html involved, to normal joining
-    # no html involved, so normal joining
-
-
-def do_center(value, width=80):
- """Centers the value in a field of a given width."""
- return text_type(value).center(width)
-
-
-@environmentfilter
-def do_first(environment, seq):
- """Return the first item of a sequence."""
- try:
- return next(iter(seq))
- except StopIteration:
+ do_escape = True
+ else:
+ value[idx] = text_type(item)
+ if do_escape:
+ d = escape(d)
+ else:
+ d = text_type(d)
+ return d.join(value)
+
+    # no html involved, so normal joining
+ return soft_unicode(d).join(imap(soft_unicode, value))
+
+
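
# --- usage sketch (assumes a Jinja 2.11-era install; sample data is illustrative) ---
# |join with a delimiter, and with an attribute lookup per item.
from jinja2 import Environment

env = Environment()
print(env.from_string("{{ [1, 2, 3]|join('|') }}").render())  # -> 1|2|3
users = [{"username": "ada"}, {"username": "bob"}]
print(env.from_string("{{ users|join(', ', attribute='username') }}").render(users=users))
# -> ada, bob
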
+def do_center(value, width=80):
+ """Centers the value in a field of a given width."""
+ return text_type(value).center(width)
+
+
+@environmentfilter
+def do_first(environment, seq):
+ """Return the first item of a sequence."""
+ try:
+ return next(iter(seq))
+ except StopIteration:
return environment.undefined("No first item, sequence was empty.")
-
-
-@environmentfilter
-def do_last(environment, seq):
+
+
+@environmentfilter
+def do_last(environment, seq):
"""
Return the last item of a sequence.
@@ -524,30 +524,30 @@ def do_last(environment, seq):
{{ data | selectattr('name', '==', 'Jinja') | list | last }}
"""
- try:
- return next(iter(reversed(seq)))
- except StopIteration:
+ try:
+ return next(iter(reversed(seq)))
+ except StopIteration:
return environment.undefined("No last item, sequence was empty.")
-
-
-@contextfilter
-def do_random(context, seq):
- """Return a random item from the sequence."""
- try:
- return random.choice(seq)
- except IndexError:
+
+
+@contextfilter
+def do_random(context, seq):
+ """Return a random item from the sequence."""
+ try:
+ return random.choice(seq)
+ except IndexError:
return context.environment.undefined("No random item, sequence was empty.")
-
-
-def do_filesizeformat(value, binary=False):
- """Format the value like a 'human-readable' file size (i.e. 13 kB,
- 4.1 MB, 102 Bytes, etc). Per default decimal prefixes are used (Mega,
- Giga, etc.), if the second parameter is set to `True` the binary
- prefixes are used (Mebi, Gibi).
- """
- bytes = float(value)
- base = binary and 1024 or 1000
- prefixes = [
+
+
+def do_filesizeformat(value, binary=False):
+ """Format the value like a 'human-readable' file size (i.e. 13 kB,
+ 4.1 MB, 102 Bytes, etc). Per default decimal prefixes are used (Mega,
+ Giga, etc.), if the second parameter is set to `True` the binary
+ prefixes are used (Mebi, Gibi).
+ """
+ bytes = float(value)
+ base = binary and 1024 or 1000
+ prefixes = [
(binary and "KiB" or "kB"),
(binary and "MiB" or "MB"),
(binary and "GiB" or "GB"),
@@ -556,153 +556,153 @@ def do_filesizeformat(value, binary=False):
(binary and "EiB" or "EB"),
(binary and "ZiB" or "ZB"),
(binary and "YiB" or "YB"),
- ]
- if bytes == 1:
+ ]
+ if bytes == 1:
return "1 Byte"
- elif bytes < base:
+ elif bytes < base:
return "%d Bytes" % bytes
- else:
- for i, prefix in enumerate(prefixes):
- unit = base ** (i + 2)
- if bytes < unit:
+ else:
+ for i, prefix in enumerate(prefixes):
+ unit = base ** (i + 2)
+ if bytes < unit:
return "%.1f %s" % ((base * bytes / unit), prefix)
return "%.1f %s" % ((base * bytes / unit), prefix)
-
-
-def do_pprint(value, verbose=False):
- """Pretty print a variable. Useful for debugging.
-
- With Jinja 1.2 onwards you can pass it a parameter. If this parameter
- is truthy the output will be more verbose (this requires `pretty`)
- """
- return pformat(value, verbose=verbose)
-
-
-@evalcontextfilter
+
+
+def do_pprint(value, verbose=False):
+ """Pretty print a variable. Useful for debugging.
+
+ With Jinja 1.2 onwards you can pass it a parameter. If this parameter
+ is truthy the output will be more verbose (this requires `pretty`)
+ """
+ return pformat(value, verbose=verbose)
+
+
+@evalcontextfilter
def do_urlize(
eval_ctx, value, trim_url_limit=None, nofollow=False, target=None, rel=None
):
- """Converts URLs in plain text into clickable links.
-
- If you pass the filter an additional integer it will shorten the urls
- to that number. Also a third argument exists that makes the urls
- "nofollow":
-
- .. sourcecode:: jinja
-
- {{ mytext|urlize(40, true) }}
- links are shortened to 40 chars and defined with rel="nofollow"
-
- If *target* is specified, the ``target`` attribute will be added to the
- ``<a>`` tag:
-
- .. sourcecode:: jinja
-
- {{ mytext|urlize(40, target='_blank') }}
-
- .. versionchanged:: 2.8+
- The *target* parameter was added.
- """
- policies = eval_ctx.environment.policies
+ """Converts URLs in plain text into clickable links.
+
+ If you pass the filter an additional integer it will shorten the urls
+ to that number. Also a third argument exists that makes the urls
+ "nofollow":
+
+ .. sourcecode:: jinja
+
+ {{ mytext|urlize(40, true) }}
+ links are shortened to 40 chars and defined with rel="nofollow"
+
+ If *target* is specified, the ``target`` attribute will be added to the
+ ``<a>`` tag:
+
+ .. sourcecode:: jinja
+
+ {{ mytext|urlize(40, target='_blank') }}
+
+ .. versionchanged:: 2.8+
+ The *target* parameter was added.
+ """
+ policies = eval_ctx.environment.policies
rel = set((rel or "").split() or [])
- if nofollow:
+ if nofollow:
rel.add("nofollow")
rel.update((policies["urlize.rel"] or "").split())
- if target is None:
+ if target is None:
target = policies["urlize.target"]
rel = " ".join(sorted(rel)) or None
- rv = urlize(value, trim_url_limit, rel=rel, target=target)
- if eval_ctx.autoescape:
- rv = Markup(rv)
- return rv
-
-
+ rv = urlize(value, trim_url_limit, rel=rel, target=target)
+ if eval_ctx.autoescape:
+ rv = Markup(rv)
+ return rv
+
+
def do_indent(s, width=4, first=False, blank=False, indentfirst=None):
- """Return a copy of the string with each line indented by 4 spaces. The
- first line and blank lines are not indented by default.
-
- :param width: Number of spaces to indent by.
- :param first: Don't skip indenting the first line.
- :param blank: Don't skip indenting empty lines.
-
- .. versionchanged:: 2.10
- Blank lines are not indented by default.
-
- Rename the ``indentfirst`` argument to ``first``.
- """
- if indentfirst is not None:
+ """Return a copy of the string with each line indented by 4 spaces. The
+ first line and blank lines are not indented by default.
+
+ :param width: Number of spaces to indent by.
+ :param first: Don't skip indenting the first line.
+ :param blank: Don't skip indenting empty lines.
+
+ .. versionchanged:: 2.10
+ Blank lines are not indented by default.
+
+ Rename the ``indentfirst`` argument to ``first``.
+ """
+ if indentfirst is not None:
warnings.warn(
"The 'indentfirst' argument is renamed to 'first' and will"
" be removed in version 3.0.",
DeprecationWarning,
stacklevel=2,
)
- first = indentfirst
-
+ first = indentfirst
+
indention = u" " * width
newline = u"\n"
-
+
if isinstance(s, Markup):
indention = Markup(indention)
newline = Markup(newline)
s += newline # this quirk is necessary for splitlines method
- if blank:
+ if blank:
rv = (newline + indention).join(s.splitlines())
- else:
- lines = s.splitlines()
- rv = lines.pop(0)
-
- if lines:
+ else:
+ lines = s.splitlines()
+ rv = lines.pop(0)
+
+ if lines:
rv += newline + newline.join(
- indention + line if line else line for line in lines
- )
-
- if first:
- rv = indention + rv
-
- return rv
-
-
-@environmentfilter
+ indention + line if line else line for line in lines
+ )
+
+ if first:
+ rv = indention + rv
+
+ return rv
+
+
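
# --- usage sketch (assumes a Jinja 2.11-era install) ---
# do_indent leaves the first line (and blank lines) alone unless told otherwise.
from jinja2.filters import do_indent

print(do_indent("a\nb", 2))              # -> "a\n  b"
print(do_indent("a\nb", 2, first=True))  # -> "  a\n  b"
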
+@environmentfilter
def do_truncate(env, s, length=255, killwords=False, end="...", leeway=None):
- """Return a truncated copy of the string. The length is specified
- with the first parameter which defaults to ``255``. If the second
- parameter is ``true`` the filter will cut the text at length. Otherwise
- it will discard the last word. If the text was in fact
- truncated it will append an ellipsis sign (``"..."``). If you want a
- different ellipsis sign than ``"..."`` you can specify it using the
- third parameter. Strings that only exceed the length by the tolerance
- margin given in the fourth parameter will not be truncated.
-
- .. sourcecode:: jinja
-
- {{ "foo bar baz qux"|truncate(9) }}
- -> "foo..."
- {{ "foo bar baz qux"|truncate(9, True) }}
- -> "foo ba..."
- {{ "foo bar baz qux"|truncate(11) }}
- -> "foo bar baz qux"
- {{ "foo bar baz qux"|truncate(11, False, '...', 0) }}
- -> "foo bar..."
-
+ """Return a truncated copy of the string. The length is specified
+ with the first parameter which defaults to ``255``. If the second
+ parameter is ``true`` the filter will cut the text at length. Otherwise
+ it will discard the last word. If the text was in fact
+ truncated it will append an ellipsis sign (``"..."``). If you want a
+ different ellipsis sign than ``"..."`` you can specify it using the
+ third parameter. Strings that only exceed the length by the tolerance
+ margin given in the fourth parameter will not be truncated.
+
+ .. sourcecode:: jinja
+
+ {{ "foo bar baz qux"|truncate(9) }}
+ -> "foo..."
+ {{ "foo bar baz qux"|truncate(9, True) }}
+ -> "foo ba..."
+ {{ "foo bar baz qux"|truncate(11) }}
+ -> "foo bar baz qux"
+ {{ "foo bar baz qux"|truncate(11, False, '...', 0) }}
+ -> "foo bar..."
+
The default leeway on newer Jinja versions is 5 and was 0 before but
- can be reconfigured globally.
- """
- if leeway is None:
+ can be reconfigured globally.
+ """
+ if leeway is None:
leeway = env.policies["truncate.leeway"]
assert length >= len(end), "expected length >= %s, got %s" % (len(end), length)
assert leeway >= 0, "expected leeway >= 0, got %s" % leeway
- if len(s) <= length + leeway:
- return s
- if killwords:
+ if len(s) <= length + leeway:
+ return s
+ if killwords:
return s[: length - len(end)] + end
result = s[: length - len(end)].rsplit(" ", 1)[0]
- return result + end
-
-
-@environmentfilter
+ return result + end
+
+
+@environmentfilter
def do_wordwrap(
environment,
s,
@@ -731,13 +731,13 @@ def do_wordwrap(
.. versionchanged:: 2.7
Added the ``wrapstring`` parameter.
- """
-
+ """
+
import textwrap
- if not wrapstring:
- wrapstring = environment.newline_sequence
-
+ if not wrapstring:
+ wrapstring = environment.newline_sequence
+
# textwrap.wrap doesn't consider existing newlines when wrapping.
# If the string has a newline before width, wrap will still insert
# a newline at width, resulting in a short line. Instead, split and
@@ -757,51 +757,51 @@ def do_wordwrap(
for line in s.splitlines()
]
)
+
-
-def do_wordcount(s):
- """Count the words in that string."""
+def do_wordcount(s):
+ """Count the words in that string."""
return len(_word_re.findall(soft_unicode(s)))
-
-
-def do_int(value, default=0, base=10):
- """Convert the value into an integer. If the
- conversion doesn't work it will return ``0``. You can
- override this default using the first parameter. You
- can also override the default base (10) in the second
- parameter, which handles input with prefixes such as
- 0b, 0o and 0x for bases 2, 8 and 16 respectively.
- The base is ignored for decimal numbers and non-string values.
- """
- try:
- if isinstance(value, string_types):
- return int(value, base)
- return int(value)
- except (TypeError, ValueError):
- # this quirk is necessary so that "42.23"|int gives 42.
- try:
- return int(float(value))
- except (TypeError, ValueError):
- return default
-
-
-def do_float(value, default=0.0):
- """Convert the value into a floating point number. If the
- conversion doesn't work it will return ``0.0``. You can
- override this default using the first parameter.
- """
- try:
- return float(value)
- except (TypeError, ValueError):
- return default
-
-
-def do_format(value, *args, **kwargs):
+
+
+def do_int(value, default=0, base=10):
+ """Convert the value into an integer. If the
+ conversion doesn't work it will return ``0``. You can
+ override this default using the first parameter. You
+ can also override the default base (10) in the second
+ parameter, which handles input with prefixes such as
+ 0b, 0o and 0x for bases 2, 8 and 16 respectively.
+ The base is ignored for decimal numbers and non-string values.
+ """
+ try:
+ if isinstance(value, string_types):
+ return int(value, base)
+ return int(value)
+ except (TypeError, ValueError):
+ # this quirk is necessary so that "42.23"|int gives 42.
+ try:
+ return int(float(value))
+ except (TypeError, ValueError):
+ return default
+
+
+def do_float(value, default=0.0):
+ """Convert the value into a floating point number. If the
+ conversion doesn't work it will return ``0.0``. You can
+ override this default using the first parameter.
+ """
+ try:
+ return float(value)
+ except (TypeError, ValueError):
+ return default
+
+
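
# --- usage sketch (assumes a Jinja 2.11-era install) ---
# |int honours an alternate base and falls back through float; |float falls back to 0.0.
from jinja2.filters import do_float, do_int

print(do_int("0x2a", base=16))     # -> 42
print(do_int("42.23"))             # -> 42 (via the float fallback)
print(do_int("nope", default=-1))  # -> -1
print(do_float("not a number"))    # -> 0.0
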
+def do_format(value, *args, **kwargs):
"""Apply the given values to a `printf-style`_ format string, like
``string % values``.
-
- .. sourcecode:: jinja
-
+
+ .. sourcecode:: jinja
+
{{ "%s, %s!"|format(greeting, name) }}
Hello, World!
@@ -815,151 +815,151 @@ def do_format(value, *args, **kwargs):
.. _printf-style: https://docs.python.org/library/stdtypes.html
#printf-style-string-formatting
- """
- if args and kwargs:
+ """
+ if args and kwargs:
raise FilterArgumentError(
"can't handle positional and keyword arguments at the same time"
)
- return soft_unicode(value) % (kwargs or args)
-
-
+ return soft_unicode(value) % (kwargs or args)
+
+
def do_trim(value, chars=None):
"""Strip leading and trailing characters, by default whitespace."""
return soft_unicode(value).strip(chars)
-
-
-def do_striptags(value):
+
+
+def do_striptags(value):
"""Strip SGML/XML tags and replace adjacent whitespace by one space."""
if hasattr(value, "__html__"):
- value = value.__html__()
- return Markup(text_type(value)).striptags()
-
-
-def do_slice(value, slices, fill_with=None):
- """Slice an iterator and return a list of lists containing
- those items. Useful if you want to create a div containing
- three ul tags that represent columns:
-
- .. sourcecode:: html+jinja
-
+ value = value.__html__()
+ return Markup(text_type(value)).striptags()
+
+
+def do_slice(value, slices, fill_with=None):
+ """Slice an iterator and return a list of lists containing
+ those items. Useful if you want to create a div containing
+ three ul tags that represent columns:
+
+ .. sourcecode:: html+jinja
+
<div class="columnwrapper">
- {%- for column in items|slice(3) %}
- <ul class="column-{{ loop.index }}">
- {%- for item in column %}
- <li>{{ item }}</li>
- {%- endfor %}
- </ul>
- {%- endfor %}
- </div>
-
- If you pass it a second argument it's used to fill missing
- values on the last iteration.
- """
- seq = list(value)
- length = len(seq)
- items_per_slice = length // slices
- slices_with_extra = length % slices
- offset = 0
- for slice_number in range(slices):
- start = offset + slice_number * items_per_slice
- if slice_number < slices_with_extra:
- offset += 1
- end = offset + (slice_number + 1) * items_per_slice
- tmp = seq[start:end]
- if fill_with is not None and slice_number >= slices_with_extra:
- tmp.append(fill_with)
- yield tmp
-
-
-def do_batch(value, linecount, fill_with=None):
- """
- A filter that batches items. It works pretty much like `slice`
- just the other way round. It returns a list of lists with the
- given number of items. If you provide a second parameter this
- is used to fill up missing items. See this example:
-
- .. sourcecode:: html+jinja
-
- <table>
- {%- for row in items|batch(3, '&nbsp;') %}
- <tr>
- {%- for column in row %}
- <td>{{ column }}</td>
- {%- endfor %}
- </tr>
- {%- endfor %}
- </table>
- """
- tmp = []
- for item in value:
- if len(tmp) == linecount:
- yield tmp
- tmp = []
- tmp.append(item)
- if tmp:
- if fill_with is not None and len(tmp) < linecount:
- tmp += [fill_with] * (linecount - len(tmp))
- yield tmp
-
-
+ {%- for column in items|slice(3) %}
+ <ul class="column-{{ loop.index }}">
+ {%- for item in column %}
+ <li>{{ item }}</li>
+ {%- endfor %}
+ </ul>
+ {%- endfor %}
+ </div>
+
+ If you pass it a second argument it's used to fill missing
+ values on the last iteration.
+ """
+ seq = list(value)
+ length = len(seq)
+ items_per_slice = length // slices
+ slices_with_extra = length % slices
+ offset = 0
+ for slice_number in range(slices):
+ start = offset + slice_number * items_per_slice
+ if slice_number < slices_with_extra:
+ offset += 1
+ end = offset + (slice_number + 1) * items_per_slice
+ tmp = seq[start:end]
+ if fill_with is not None and slice_number >= slices_with_extra:
+ tmp.append(fill_with)
+ yield tmp
+
+
+def do_batch(value, linecount, fill_with=None):
+ """
+ A filter that batches items. It works pretty much like `slice`
+ just the other way round. It returns a list of lists with the
+ given number of items. If you provide a second parameter this
+ is used to fill up missing items. See this example:
+
+ .. sourcecode:: html+jinja
+
+ <table>
+ {%- for row in items|batch(3, '&nbsp;') %}
+ <tr>
+ {%- for column in row %}
+ <td>{{ column }}</td>
+ {%- endfor %}
+ </tr>
+ {%- endfor %}
+ </table>
+ """
+ tmp = []
+ for item in value:
+ if len(tmp) == linecount:
+ yield tmp
+ tmp = []
+ tmp.append(item)
+ if tmp:
+ if fill_with is not None and len(tmp) < linecount:
+ tmp += [fill_with] * (linecount - len(tmp))
+ yield tmp
+
+
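
# --- usage sketch (assumes a Jinja 2.11-era install) ---
# |slice produces N columns, |batch produces rows of N items; both accept fill_with.
from jinja2.filters import do_batch, do_slice

print(list(do_slice(range(7), 3, fill_with=0)))
# -> [[0, 1, 2], [3, 4, 0], [5, 6, 0]]
print(list(do_batch(range(7), 3, "x")))
# -> [[0, 1, 2], [3, 4, 5], [6, 'x', 'x']]
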
def do_round(value, precision=0, method="common"):
- """Round the number to a given precision. The first
- parameter specifies the precision (default is ``0``), the
- second the rounding method:
-
- - ``'common'`` rounds either up or down
- - ``'ceil'`` always rounds up
- - ``'floor'`` always rounds down
-
- If you don't specify a method ``'common'`` is used.
-
- .. sourcecode:: jinja
-
- {{ 42.55|round }}
- -> 43.0
- {{ 42.55|round(1, 'floor') }}
- -> 42.5
-
- Note that even if rounded to 0 precision, a float is returned. If
- you need a real integer, pipe it through `int`:
-
- .. sourcecode:: jinja
-
- {{ 42.55|round|int }}
- -> 43
- """
+ """Round the number to a given precision. The first
+ parameter specifies the precision (default is ``0``), the
+ second the rounding method:
+
+ - ``'common'`` rounds either up or down
+ - ``'ceil'`` always rounds up
+ - ``'floor'`` always rounds down
+
+ If you don't specify a method ``'common'`` is used.
+
+ .. sourcecode:: jinja
+
+ {{ 42.55|round }}
+ -> 43.0
+ {{ 42.55|round(1, 'floor') }}
+ -> 42.5
+
+ Note that even if rounded to 0 precision, a float is returned. If
+ you need a real integer, pipe it through `int`:
+
+ .. sourcecode:: jinja
+
+ {{ 42.55|round|int }}
+ -> 43
+ """
if method not in {"common", "ceil", "floor"}:
raise FilterArgumentError("method must be common, ceil or floor")
if method == "common":
- return round(value, precision)
- func = getattr(math, method)
- return func(value * (10 ** precision)) / (10 ** precision)
-
-
-# Use a regular tuple repr here. This is what we did in the past and we
-# really want to hide this custom type as much as possible. In particular
-# we do not want to accidentally expose an auto generated repr in case
-# people start to print this out in comments or something similar for
-# debugging.
+ return round(value, precision)
+ func = getattr(math, method)
+ return func(value * (10 ** precision)) / (10 ** precision)
+
+
+# Use a regular tuple repr here. This is what we did in the past and we
+# really want to hide this custom type as much as possible. In particular
+# we do not want to accidentally expose an auto generated repr in case
+# people start to print this out in comments or something similar for
+# debugging.
_GroupTuple = namedtuple("_GroupTuple", ["grouper", "list"])
-_GroupTuple.__repr__ = tuple.__repr__
-_GroupTuple.__str__ = tuple.__str__
-
+_GroupTuple.__repr__ = tuple.__repr__
+_GroupTuple.__str__ = tuple.__str__
+
-@environmentfilter
-def do_groupby(environment, value, attribute):
+@environmentfilter
+def do_groupby(environment, value, attribute):
"""Group a sequence of objects by an attribute using Python's
:func:`itertools.groupby`. The attribute can use dot notation for
nested access, like ``"address.city"``. Unlike Python's ``groupby``,
the values are sorted first so only one group is returned for each
unique value.
-
+
For example, a list of ``User`` objects with a ``city`` attribute
can be rendered in groups. In this example, ``grouper`` refers to
the ``city`` value of the group.
-
- .. sourcecode:: html+jinja
-
+
+ .. sourcecode:: html+jinja
+
<ul>{% for city, items in users|groupby("city") %}
<li>{{ city }}
<ul>{% for user in items %}
@@ -967,123 +967,123 @@ def do_groupby(environment, value, attribute):
{% endfor %}</ul>
</li>
{% endfor %}</ul>
-
+
``groupby`` yields namedtuples of ``(grouper, list)``, which
can be used instead of the tuple unpacking above. ``grouper`` is the
value of the attribute, and ``list`` is the items with that value.
-
- .. sourcecode:: html+jinja
-
+
+ .. sourcecode:: html+jinja
+
<ul>{% for group in users|groupby("city") %}
<li>{{ group.grouper }}: {{ group.list|join(", ") }}
{% endfor %}</ul>
-
- .. versionchanged:: 2.6
+
+ .. versionchanged:: 2.6
The attribute supports dot notation for nested access.
- """
- expr = make_attrgetter(environment, attribute)
+ """
+ expr = make_attrgetter(environment, attribute)
return [
_GroupTuple(key, list(values))
for key, values in groupby(sorted(value, key=expr), expr)
]
-
-
-@environmentfilter
-def do_sum(environment, iterable, attribute=None, start=0):
- """Returns the sum of a sequence of numbers plus the value of parameter
- 'start' (which defaults to 0). When the sequence is empty it returns
- start.
-
- It is also possible to sum up only certain attributes:
-
- .. sourcecode:: jinja
-
- Total: {{ items|sum(attribute='price') }}
-
- .. versionchanged:: 2.6
-       The `attribute` parameter was added to allow summing up over
- attributes. Also the `start` parameter was moved on to the right.
- """
- if attribute is not None:
- iterable = imap(make_attrgetter(environment, attribute), iterable)
- return sum(iterable, start)
-
-
-def do_list(value):
- """Convert the value into a list. If it was a string the returned list
- will be a list of characters.
- """
- return list(value)
-
-
-def do_mark_safe(value):
- """Mark the value as safe which means that in an environment with automatic
- escaping enabled this variable will not be escaped.
- """
- return Markup(value)
-
-
-def do_mark_unsafe(value):
- """Mark a value as unsafe. This is the reverse operation for :func:`safe`."""
- return text_type(value)
-
-
-def do_reverse(value):
- """Reverse the object or return an iterator that iterates over it the other
- way round.
- """
- if isinstance(value, string_types):
- return value[::-1]
- try:
- return reversed(value)
- except TypeError:
- try:
- rv = list(value)
- rv.reverse()
- return rv
- except TypeError:
+
+
+@environmentfilter
+def do_sum(environment, iterable, attribute=None, start=0):
+ """Returns the sum of a sequence of numbers plus the value of parameter
+ 'start' (which defaults to 0). When the sequence is empty it returns
+ start.
+
+ It is also possible to sum up only certain attributes:
+
+ .. sourcecode:: jinja
+
+ Total: {{ items|sum(attribute='price') }}
+
+ .. versionchanged:: 2.6
+       The `attribute` parameter was added to allow summing up over
+ attributes. Also the `start` parameter was moved on to the right.
+ """
+ if attribute is not None:
+ iterable = imap(make_attrgetter(environment, attribute), iterable)
+ return sum(iterable, start)
+
+
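
# --- usage sketch (assumes a Jinja 2.11-era install; sample data is illustrative) ---
# |groupby sorts and groups by the attribute; |sum can aggregate an attribute.
from jinja2 import Environment

env = Environment()
users = [{"city": "Graz", "n": 1}, {"city": "Wien", "n": 2}, {"city": "Graz", "n": 3}]
tmpl = env.from_string(
    "{% for city, items in users|groupby('city') %}{{ city }}:{{ items|length }} {% endfor %}"
)
print(tmpl.render(users=users))  # -> "Graz:2 Wien:1 "
print(env.from_string("{{ users|sum(attribute='n') }}").render(users=users))  # -> 6
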
+def do_list(value):
+ """Convert the value into a list. If it was a string the returned list
+ will be a list of characters.
+ """
+ return list(value)
+
+
+def do_mark_safe(value):
+ """Mark the value as safe which means that in an environment with automatic
+ escaping enabled this variable will not be escaped.
+ """
+ return Markup(value)
+
+
+def do_mark_unsafe(value):
+ """Mark a value as unsafe. This is the reverse operation for :func:`safe`."""
+ return text_type(value)
+
+
+def do_reverse(value):
+ """Reverse the object or return an iterator that iterates over it the other
+ way round.
+ """
+ if isinstance(value, string_types):
+ return value[::-1]
+ try:
+ return reversed(value)
+ except TypeError:
+ try:
+ rv = list(value)
+ rv.reverse()
+ return rv
+ except TypeError:
raise FilterArgumentError("argument must be iterable")
-
-
-@environmentfilter
-def do_attr(environment, obj, name):
- """Get an attribute of an object. ``foo|attr("bar")`` works like
- ``foo.bar`` just that always an attribute is returned and items are not
- looked up.
-
- See :ref:`Notes on subscriptions <notes-on-subscriptions>` for more details.
- """
- try:
- name = str(name)
- except UnicodeError:
- pass
- else:
- try:
- value = getattr(obj, name)
- except AttributeError:
- pass
- else:
+
+
+@environmentfilter
+def do_attr(environment, obj, name):
+ """Get an attribute of an object. ``foo|attr("bar")`` works like
+ ``foo.bar`` just that always an attribute is returned and items are not
+ looked up.
+
+ See :ref:`Notes on subscriptions <notes-on-subscriptions>` for more details.
+ """
+ try:
+ name = str(name)
+ except UnicodeError:
+ pass
+ else:
+ try:
+ value = getattr(obj, name)
+ except AttributeError:
+ pass
+ else:
if environment.sandboxed and not environment.is_safe_attribute(
obj, name, value
):
- return environment.unsafe_undefined(obj, name)
- return value
- return environment.undefined(obj=obj, name=name)
-
-
-@contextfilter
-def do_map(*args, **kwargs):
- """Applies a filter on a sequence of objects or looks up an attribute.
- This is useful when dealing with lists of objects but you are really
- only interested in a certain value of it.
-
- The basic usage is mapping on an attribute. Imagine you have a list
- of users but you are only interested in a list of usernames:
-
- .. sourcecode:: jinja
-
- Users on this page: {{ users|map(attribute='username')|join(', ') }}
-
+ return environment.unsafe_undefined(obj, name)
+ return value
+ return environment.undefined(obj=obj, name=name)
+
+
+@contextfilter
+def do_map(*args, **kwargs):
+ """Applies a filter on a sequence of objects or looks up an attribute.
+ This is useful when dealing with lists of objects but you are really
+ only interested in a certain value of it.
+
+ The basic usage is mapping on an attribute. Imagine you have a list
+ of users but you are only interested in a list of usernames:
+
+ .. sourcecode:: jinja
+
+ Users on this page: {{ users|map(attribute='username')|join(', ') }}
+
You can specify a ``default`` value to use if an object in the list
does not have the given attribute.
@@ -1091,14 +1091,14 @@ def do_map(*args, **kwargs):
{{ users|map(attribute="username", default="Anonymous")|join(", ") }}
- Alternatively you can let it invoke a filter by passing the name of the
- filter and the arguments afterwards. A good example would be applying a
- text conversion filter on a sequence:
-
- .. sourcecode:: jinja
-
- Users on this page: {{ titles|map('lower')|join(', ') }}
-
+ Alternatively you can let it invoke a filter by passing the name of the
+ filter and the arguments afterwards. A good example would be applying a
+ text conversion filter on a sequence:
+
+ .. sourcecode:: jinja
+
+ Users on this page: {{ titles|map('lower')|join(', ') }}
+
Similar to a generator comprehension such as:
.. code-block:: python
@@ -1110,31 +1110,31 @@ def do_map(*args, **kwargs):
.. versionchanged:: 2.11.0
Added the ``default`` parameter.
- .. versionadded:: 2.7
- """
- seq, func = prepare_map(args, kwargs)
- if seq:
- for item in seq:
- yield func(item)
-
-
-@contextfilter
-def do_select(*args, **kwargs):
- """Filters a sequence of objects by applying a test to each object,
- and only selecting the objects with the test succeeding.
-
- If no test is specified, each object will be evaluated as a boolean.
-
- Example usage:
-
- .. sourcecode:: jinja
-
- {{ numbers|select("odd") }}
- {{ numbers|select("odd") }}
- {{ numbers|select("divisibleby", 3) }}
- {{ numbers|select("lessthan", 42) }}
- {{ strings|select("equalto", "mystring") }}
-
+ .. versionadded:: 2.7
+ """
+ seq, func = prepare_map(args, kwargs)
+ if seq:
+ for item in seq:
+ yield func(item)
+
+
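Both calling styles of ``map`` described above, attribute lookup and a named filter, can be exercised directly; a short illustrative sketch (sample data invented):

    from jinja2 import Environment

    env = Environment()

    users = [{"username": "alice"}, {"username": "bob"}]
    out = env.from_string(
        "{{ users|map(attribute='username')|join(', ') }}"
    ).render(users=users)
    print(out)                                  # alice, bob

    titles = ["Foo", "BAR"]
    out = env.from_string("{{ titles|map('lower')|join(', ') }}").render(titles=titles)
    print(out)                                  # foo, bar
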
+@contextfilter
+def do_select(*args, **kwargs):
+ """Filters a sequence of objects by applying a test to each object,
+ and only selecting the objects with the test succeeding.
+
+ If no test is specified, each object will be evaluated as a boolean.
+
+ Example usage:
+
+ .. sourcecode:: jinja
+
+ {{ numbers|select("odd") }}
+ {{ numbers|select("odd") }}
+ {{ numbers|select("divisibleby", 3) }}
+ {{ numbers|select("lessthan", 42) }}
+ {{ strings|select("equalto", "mystring") }}
+
Similar to a generator comprehension such as:
.. code-block:: python
@@ -1142,51 +1142,51 @@ def do_select(*args, **kwargs):
(n for n in numbers if test_odd(n))
(n for n in numbers if test_divisibleby(n, 3))
- .. versionadded:: 2.7
- """
- return select_or_reject(args, kwargs, lambda x: x, False)
-
-
-@contextfilter
-def do_reject(*args, **kwargs):
- """Filters a sequence of objects by applying a test to each object,
- and rejecting the objects with the test succeeding.
-
- If no test is specified, each object will be evaluated as a boolean.
-
- Example usage:
-
- .. sourcecode:: jinja
-
- {{ numbers|reject("odd") }}
-
+ .. versionadded:: 2.7
+ """
+ return select_or_reject(args, kwargs, lambda x: x, False)
+
+
+@contextfilter
+def do_reject(*args, **kwargs):
+ """Filters a sequence of objects by applying a test to each object,
+ and rejecting the objects with the test succeeding.
+
+ If no test is specified, each object will be evaluated as a boolean.
+
+ Example usage:
+
+ .. sourcecode:: jinja
+
+ {{ numbers|reject("odd") }}
+
Similar to a generator comprehension such as:
.. code-block:: python
(n for n in numbers if not test_odd(n))
- .. versionadded:: 2.7
- """
- return select_or_reject(args, kwargs, lambda x: not x, False)
-
-
-@contextfilter
-def do_selectattr(*args, **kwargs):
- """Filters a sequence of objects by applying a test to the specified
- attribute of each object, and only selecting the objects with the
- test succeeding.
-
- If no test is specified, the attribute's value will be evaluated as
- a boolean.
-
- Example usage:
-
- .. sourcecode:: jinja
-
- {{ users|selectattr("is_active") }}
- {{ users|selectattr("email", "none") }}
-
+ .. versionadded:: 2.7
+ """
+ return select_or_reject(args, kwargs, lambda x: not x, False)
+
+
+@contextfilter
+def do_selectattr(*args, **kwargs):
+ """Filters a sequence of objects by applying a test to the specified
+ attribute of each object, and only selecting the objects with the
+ test succeeding.
+
+ If no test is specified, the attribute's value will be evaluated as
+ a boolean.
+
+ Example usage:
+
+ .. sourcecode:: jinja
+
+ {{ users|selectattr("is_active") }}
+ {{ users|selectattr("email", "none") }}
+
Similar to a generator comprehension such as:
.. code-block:: python
@@ -1194,25 +1194,25 @@ def do_selectattr(*args, **kwargs):
(u for user in users if user.is_active)
(u for user in users if test_none(user.email))
- .. versionadded:: 2.7
- """
- return select_or_reject(args, kwargs, lambda x: x, True)
-
-
-@contextfilter
-def do_rejectattr(*args, **kwargs):
- """Filters a sequence of objects by applying a test to the specified
- attribute of each object, and rejecting the objects with the test
- succeeding.
-
- If no test is specified, the attribute's value will be evaluated as
- a boolean.
-
- .. sourcecode:: jinja
-
- {{ users|rejectattr("is_active") }}
- {{ users|rejectattr("email", "none") }}
-
+ .. versionadded:: 2.7
+ """
+ return select_or_reject(args, kwargs, lambda x: x, True)
+
+
+@contextfilter
+def do_rejectattr(*args, **kwargs):
+ """Filters a sequence of objects by applying a test to the specified
+ attribute of each object, and rejecting the objects with the test
+ succeeding.
+
+ If no test is specified, the attribute's value will be evaluated as
+ a boolean.
+
+ .. sourcecode:: jinja
+
+ {{ users|rejectattr("is_active") }}
+ {{ users|rejectattr("email", "none") }}
+
Similar to a generator comprehension such as:
.. code-block:: python
@@ -1220,112 +1220,112 @@ def do_rejectattr(*args, **kwargs):
(u for user in users if not user.is_active)
(u for user in users if not test_none(user.email))
- .. versionadded:: 2.7
- """
- return select_or_reject(args, kwargs, lambda x: not x, True)
-
-
-@evalcontextfilter
-def do_tojson(eval_ctx, value, indent=None):
- """Dumps a structure to JSON so that it's safe to use in ``<script>``
- tags. It accepts the same arguments and returns a JSON string. Note that
- this is available in templates through the ``|tojson`` filter which will
- also mark the result as safe. Due to how this function escapes certain
- characters this is safe even if used outside of ``<script>`` tags.
-
- The following characters are escaped in strings:
-
- - ``<``
- - ``>``
- - ``&``
- - ``'``
-
- This makes it safe to embed such strings in any place in HTML with the
- notable exception of double-quoted attributes. In that case, single-
- quote your attributes or additionally HTML-escape them.
-
- The indent parameter can be used to enable pretty printing. Set it to
- the number of spaces that the structures should be indented with.
-
- Note that this filter is for use in HTML contexts only.
-
- .. versionadded:: 2.9
- """
- policies = eval_ctx.environment.policies
+ .. versionadded:: 2.7
+ """
+ return select_or_reject(args, kwargs, lambda x: not x, True)
+
+
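The four filters above (``select``, ``reject``, ``selectattr``, ``rejectattr``) all funnel into ``select_or_reject`` further below; they differ only in whether the test result is negated and whether an attribute is extracted first. A hedged sketch (sample data invented):

    from jinja2 import Environment

    env = Environment()

    print(env.from_string("{{ nums|select('odd')|list }}").render(nums=range(5)))
    # [1, 3]
    print(env.from_string("{{ nums|reject('odd')|list }}").render(nums=range(5)))
    # [0, 2, 4]

    users = [{"name": "alice", "is_active": True},
             {"name": "bob", "is_active": False}]
    out = env.from_string(
        "{{ users|selectattr('is_active')|map(attribute='name')|join(', ') }}"
    ).render(users=users)
    print(out)                                  # alice
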
+@evalcontextfilter
+def do_tojson(eval_ctx, value, indent=None):
+ """Dumps a structure to JSON so that it's safe to use in ``<script>``
+ tags. It accepts the same arguments and returns a JSON string. Note that
+ this is available in templates through the ``|tojson`` filter which will
+ also mark the result as safe. Due to how this function escapes certain
+ characters this is safe even if used outside of ``<script>`` tags.
+
+ The following characters are escaped in strings:
+
+ - ``<``
+ - ``>``
+ - ``&``
+ - ``'``
+
+ This makes it safe to embed such strings in any place in HTML with the
+ notable exception of double-quoted attributes. In that case, single-
+ quote your attributes or additionally HTML-escape them.
+
+ The indent parameter can be used to enable pretty printing. Set it to
+ the number of spaces that the structures should be indented with.
+
+ Note that this filter is for use in HTML contexts only.
+
+ .. versionadded:: 2.9
+ """
+ policies = eval_ctx.environment.policies
dumper = policies["json.dumps_function"]
options = policies["json.dumps_kwargs"]
- if indent is not None:
- options = dict(options)
+ if indent is not None:
+ options = dict(options)
options["indent"] = indent
- return htmlsafe_json_dumps(value, dumper=dumper, **options)
-
-
-def prepare_map(args, kwargs):
- context = args[0]
- seq = args[1]
+ return htmlsafe_json_dumps(value, dumper=dumper, **options)
+
+
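``tojson`` above delegates to the environment's ``json.dumps_function`` and ``json.dumps_kwargs`` policies and then escapes the result for HTML. A minimal sketch (sample data invented):

    from jinja2 import Environment

    env = Environment()
    tmpl = env.from_string("<script>var data = {{ data|tojson }};</script>")
    print(tmpl.render(data={"msg": "</script> & 'quotes'"}))
    # "<", ">", "&" and "'" come out as \u003c, \u003e, \u0026 and \u0027,
    # so the embedded value cannot close the surrounding <script> block.

    # The indent parameter enables pretty printing:
    print(env.from_string("{{ data|tojson(indent=2) }}").render(data={"a": 1}))
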
+def prepare_map(args, kwargs):
+ context = args[0]
+ seq = args[1]
default = None
-
+
if len(args) == 2 and "attribute" in kwargs:
attribute = kwargs.pop("attribute")
default = kwargs.pop("default", None)
- if kwargs:
+ if kwargs:
raise FilterArgumentError(
"Unexpected keyword argument %r" % next(iter(kwargs))
)
func = make_attrgetter(context.environment, attribute, default=default)
- else:
- try:
- name = args[2]
- args = args[3:]
- except LookupError:
+ else:
+ try:
+ name = args[2]
+ args = args[3:]
+ except LookupError:
raise FilterArgumentError("map requires a filter argument")
-
+
def func(item):
return context.environment.call_filter(
name, item, args, kwargs, context=context
)
- return seq, func
-
-
-def prepare_select_or_reject(args, kwargs, modfunc, lookup_attr):
- context = args[0]
- seq = args[1]
- if lookup_attr:
- try:
- attr = args[2]
- except LookupError:
+ return seq, func
+
+
+def prepare_select_or_reject(args, kwargs, modfunc, lookup_attr):
+ context = args[0]
+ seq = args[1]
+ if lookup_attr:
+ try:
+ attr = args[2]
+ except LookupError:
raise FilterArgumentError("Missing parameter for attribute name")
- transfunc = make_attrgetter(context.environment, attr)
- off = 1
- else:
- off = 0
-
+ transfunc = make_attrgetter(context.environment, attr)
+ off = 1
+ else:
+ off = 0
+
def transfunc(x):
return x
- try:
- name = args[2 + off]
+ try:
+ name = args[2 + off]
args = args[3 + off :]
def func(item):
return context.environment.call_test(name, item, args, kwargs)
- except LookupError:
- func = bool
-
- return seq, lambda item: modfunc(func(transfunc(item)))
-
-
-def select_or_reject(args, kwargs, modfunc, lookup_attr):
- seq, func = prepare_select_or_reject(args, kwargs, modfunc, lookup_attr)
- if seq:
- for item in seq:
- if func(item):
- yield item
-
-
-FILTERS = {
+ except LookupError:
+ func = bool
+
+ return seq, lambda item: modfunc(func(transfunc(item)))
+
+
+def select_or_reject(args, kwargs, modfunc, lookup_attr):
+ seq, func = prepare_select_or_reject(args, kwargs, modfunc, lookup_attr)
+ if seq:
+ for item in seq:
+ if func(item):
+ yield item
+
+
+FILTERS = {
"abs": abs,
"attr": do_attr,
"batch": do_batch,
@@ -1379,4 +1379,4 @@ FILTERS = {
"wordwrap": do_wordwrap,
"xmlattr": do_xmlattr,
"tojson": do_tojson,
-}
+}
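The ``FILTERS`` table above is copied into ``Environment.filters`` when an environment is created, and custom filters are registered through the same mapping. A small sketch (the ``shout`` filter is invented for illustration):

    from jinja2 import Environment

    env = Environment()

    def shout(value):
        """Upper-case a value and add an exclamation mark."""
        return str(value).upper() + "!"

    env.filters["shout"] = shout
    print(env.from_string("{{ 'hello'|shout }}").render())   # HELLO!
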
diff --git a/contrib/python/Jinja2/py2/jinja2/idtracking.py b/contrib/python/Jinja2/py2/jinja2/idtracking.py
index 9a0d838017..059b1fad0c 100644
--- a/contrib/python/Jinja2/py2/jinja2/idtracking.py
+++ b/contrib/python/Jinja2/py2/jinja2/idtracking.py
@@ -1,161 +1,161 @@
from ._compat import iteritems
from .visitor import NodeVisitor
-
+
VAR_LOAD_PARAMETER = "param"
VAR_LOAD_RESOLVE = "resolve"
VAR_LOAD_ALIAS = "alias"
VAR_LOAD_UNDEFINED = "undefined"
-
-
-def find_symbols(nodes, parent_symbols=None):
- sym = Symbols(parent=parent_symbols)
- visitor = FrameSymbolVisitor(sym)
- for node in nodes:
- visitor.visit(node)
- return sym
-
-
-def symbols_for_node(node, parent_symbols=None):
- sym = Symbols(parent=parent_symbols)
- sym.analyze_node(node)
- return sym
-
-
-class Symbols(object):
- def __init__(self, parent=None, level=None):
- if level is None:
- if parent is None:
- level = 0
- else:
- level = parent.level + 1
- self.level = level
- self.parent = parent
- self.refs = {}
- self.loads = {}
- self.stores = set()
-
- def analyze_node(self, node, **kwargs):
- visitor = RootVisitor(self)
- visitor.visit(node, **kwargs)
-
- def _define_ref(self, name, load=None):
+
+
+def find_symbols(nodes, parent_symbols=None):
+ sym = Symbols(parent=parent_symbols)
+ visitor = FrameSymbolVisitor(sym)
+ for node in nodes:
+ visitor.visit(node)
+ return sym
+
+
+def symbols_for_node(node, parent_symbols=None):
+ sym = Symbols(parent=parent_symbols)
+ sym.analyze_node(node)
+ return sym
+
+
+class Symbols(object):
+ def __init__(self, parent=None, level=None):
+ if level is None:
+ if parent is None:
+ level = 0
+ else:
+ level = parent.level + 1
+ self.level = level
+ self.parent = parent
+ self.refs = {}
+ self.loads = {}
+ self.stores = set()
+
+ def analyze_node(self, node, **kwargs):
+ visitor = RootVisitor(self)
+ visitor.visit(node, **kwargs)
+
+ def _define_ref(self, name, load=None):
ident = "l_%d_%s" % (self.level, name)
- self.refs[name] = ident
- if load is not None:
- self.loads[ident] = load
- return ident
-
- def find_load(self, target):
- if target in self.loads:
- return self.loads[target]
- if self.parent is not None:
- return self.parent.find_load(target)
-
- def find_ref(self, name):
- if name in self.refs:
- return self.refs[name]
- if self.parent is not None:
- return self.parent.find_ref(name)
-
- def ref(self, name):
- rv = self.find_ref(name)
- if rv is None:
+ self.refs[name] = ident
+ if load is not None:
+ self.loads[ident] = load
+ return ident
+
+ def find_load(self, target):
+ if target in self.loads:
+ return self.loads[target]
+ if self.parent is not None:
+ return self.parent.find_load(target)
+
+ def find_ref(self, name):
+ if name in self.refs:
+ return self.refs[name]
+ if self.parent is not None:
+ return self.parent.find_ref(name)
+
+ def ref(self, name):
+ rv = self.find_ref(name)
+ if rv is None:
raise AssertionError(
"Tried to resolve a name to a reference that "
"was unknown to the frame (%r)" % name
)
- return rv
-
- def copy(self):
- rv = object.__new__(self.__class__)
- rv.__dict__.update(self.__dict__)
- rv.refs = self.refs.copy()
- rv.loads = self.loads.copy()
- rv.stores = self.stores.copy()
- return rv
-
- def store(self, name):
- self.stores.add(name)
-
- # If we have not seen the name referenced yet, we need to figure
- # out what to set it to.
- if name not in self.refs:
- # If there is a parent scope we check if the name has a
- # reference there. If it does it means we might have to alias
- # to a variable there.
- if self.parent is not None:
- outer_ref = self.parent.find_ref(name)
- if outer_ref is not None:
- self._define_ref(name, load=(VAR_LOAD_ALIAS, outer_ref))
- return
-
- # Otherwise we can just set it to undefined.
- self._define_ref(name, load=(VAR_LOAD_UNDEFINED, None))
-
- def declare_parameter(self, name):
- self.stores.add(name)
- return self._define_ref(name, load=(VAR_LOAD_PARAMETER, None))
-
- def load(self, name):
- target = self.find_ref(name)
- if target is None:
- self._define_ref(name, load=(VAR_LOAD_RESOLVE, name))
-
- def branch_update(self, branch_symbols):
- stores = {}
- for branch in branch_symbols:
- for target in branch.stores:
- if target in self.stores:
- continue
- stores[target] = stores.get(target, 0) + 1
-
- for sym in branch_symbols:
- self.refs.update(sym.refs)
- self.loads.update(sym.loads)
- self.stores.update(sym.stores)
-
- for name, branch_count in iteritems(stores):
- if branch_count == len(branch_symbols):
- continue
- target = self.find_ref(name)
+ return rv
+
+ def copy(self):
+ rv = object.__new__(self.__class__)
+ rv.__dict__.update(self.__dict__)
+ rv.refs = self.refs.copy()
+ rv.loads = self.loads.copy()
+ rv.stores = self.stores.copy()
+ return rv
+
+ def store(self, name):
+ self.stores.add(name)
+
+ # If we have not seen the name referenced yet, we need to figure
+ # out what to set it to.
+ if name not in self.refs:
+ # If there is a parent scope we check if the name has a
+ # reference there. If it does it means we might have to alias
+ # to a variable there.
+ if self.parent is not None:
+ outer_ref = self.parent.find_ref(name)
+ if outer_ref is not None:
+ self._define_ref(name, load=(VAR_LOAD_ALIAS, outer_ref))
+ return
+
+ # Otherwise we can just set it to undefined.
+ self._define_ref(name, load=(VAR_LOAD_UNDEFINED, None))
+
+ def declare_parameter(self, name):
+ self.stores.add(name)
+ return self._define_ref(name, load=(VAR_LOAD_PARAMETER, None))
+
+ def load(self, name):
+ target = self.find_ref(name)
+ if target is None:
+ self._define_ref(name, load=(VAR_LOAD_RESOLVE, name))
+
+ def branch_update(self, branch_symbols):
+ stores = {}
+ for branch in branch_symbols:
+ for target in branch.stores:
+ if target in self.stores:
+ continue
+ stores[target] = stores.get(target, 0) + 1
+
+ for sym in branch_symbols:
+ self.refs.update(sym.refs)
+ self.loads.update(sym.loads)
+ self.stores.update(sym.stores)
+
+ for name, branch_count in iteritems(stores):
+ if branch_count == len(branch_symbols):
+ continue
+ target = self.find_ref(name)
assert target is not None, "should not happen"
-
- if self.parent is not None:
- outer_target = self.parent.find_ref(name)
- if outer_target is not None:
- self.loads[target] = (VAR_LOAD_ALIAS, outer_target)
- continue
- self.loads[target] = (VAR_LOAD_RESOLVE, name)
-
- def dump_stores(self):
- rv = {}
- node = self
- while node is not None:
- for name in node.stores:
- if name not in rv:
- rv[name] = self.find_ref(name)
- node = node.parent
- return rv
-
- def dump_param_targets(self):
- rv = set()
- node = self
- while node is not None:
- for target, (instr, _) in iteritems(self.loads):
- if instr == VAR_LOAD_PARAMETER:
- rv.add(target)
- node = node.parent
- return rv
-
-
-class RootVisitor(NodeVisitor):
- def __init__(self, symbols):
- self.sym_visitor = FrameSymbolVisitor(symbols)
-
- def _simple_visit(self, node, **kwargs):
- for child in node.iter_child_nodes():
- self.sym_visitor.visit(child)
-
+
+ if self.parent is not None:
+ outer_target = self.parent.find_ref(name)
+ if outer_target is not None:
+ self.loads[target] = (VAR_LOAD_ALIAS, outer_target)
+ continue
+ self.loads[target] = (VAR_LOAD_RESOLVE, name)
+
+ def dump_stores(self):
+ rv = {}
+ node = self
+ while node is not None:
+ for name in node.stores:
+ if name not in rv:
+ rv[name] = self.find_ref(name)
+ node = node.parent
+ return rv
+
+ def dump_param_targets(self):
+ rv = set()
+ node = self
+ while node is not None:
+ for target, (instr, _) in iteritems(self.loads):
+ if instr == VAR_LOAD_PARAMETER:
+ rv.add(target)
+ node = node.parent
+ return rv
+
+
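``Symbols`` above is the table the compiler consults to decide whether a name is stored locally, passed as a parameter, or resolved from the render context. A minimal sketch against this internal API (import paths as in this tree; the API may change between releases):

    from jinja2 import Environment
    from jinja2.idtracking import symbols_for_node

    env = Environment()
    tree = env.parse("{% set x = 1 %}{{ x }}{{ y }}")

    sym = symbols_for_node(tree)
    print(sym.dump_stores())                   # {'x': 'l_0_x'} -- x is assigned here
    print(sym.find_load(sym.find_ref("y")))    # ('resolve', 'y') -- y comes from the context
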
+class RootVisitor(NodeVisitor):
+ def __init__(self, symbols):
+ self.sym_visitor = FrameSymbolVisitor(symbols)
+
+ def _simple_visit(self, node, **kwargs):
+ for child in node.iter_child_nodes():
+ self.sym_visitor.visit(child)
+
visit_Template = (
visit_Block
) = (
@@ -163,128 +163,128 @@ class RootVisitor(NodeVisitor):
) = (
visit_FilterBlock
) = visit_Scope = visit_If = visit_ScopedEvalContextModifier = _simple_visit
-
- def visit_AssignBlock(self, node, **kwargs):
- for child in node.body:
- self.sym_visitor.visit(child)
-
- def visit_CallBlock(self, node, **kwargs):
+
+ def visit_AssignBlock(self, node, **kwargs):
+ for child in node.body:
+ self.sym_visitor.visit(child)
+
+ def visit_CallBlock(self, node, **kwargs):
for child in node.iter_child_nodes(exclude=("call",)):
- self.sym_visitor.visit(child)
-
- def visit_OverlayScope(self, node, **kwargs):
- for child in node.body:
- self.sym_visitor.visit(child)
-
+ self.sym_visitor.visit(child)
+
+ def visit_OverlayScope(self, node, **kwargs):
+ for child in node.body:
+ self.sym_visitor.visit(child)
+
def visit_For(self, node, for_branch="body", **kwargs):
if for_branch == "body":
- self.sym_visitor.visit(node.target, store_as_param=True)
- branch = node.body
+ self.sym_visitor.visit(node.target, store_as_param=True)
+ branch = node.body
elif for_branch == "else":
- branch = node.else_
+ branch = node.else_
elif for_branch == "test":
- self.sym_visitor.visit(node.target, store_as_param=True)
- if node.test is not None:
- self.sym_visitor.visit(node.test)
- return
- else:
+ self.sym_visitor.visit(node.target, store_as_param=True)
+ if node.test is not None:
+ self.sym_visitor.visit(node.test)
+ return
+ else:
raise RuntimeError("Unknown for branch")
- for item in branch or ():
- self.sym_visitor.visit(item)
-
- def visit_With(self, node, **kwargs):
- for target in node.targets:
- self.sym_visitor.visit(target)
- for child in node.body:
- self.sym_visitor.visit(child)
-
- def generic_visit(self, node, *args, **kwargs):
+ for item in branch or ():
+ self.sym_visitor.visit(item)
+
+ def visit_With(self, node, **kwargs):
+ for target in node.targets:
+ self.sym_visitor.visit(target)
+ for child in node.body:
+ self.sym_visitor.visit(child)
+
+ def generic_visit(self, node, *args, **kwargs):
raise NotImplementedError(
"Cannot find symbols for %r" % node.__class__.__name__
)
-
-
-class FrameSymbolVisitor(NodeVisitor):
- """A visitor for `Frame.inspect`."""
-
- def __init__(self, symbols):
- self.symbols = symbols
-
- def visit_Name(self, node, store_as_param=False, **kwargs):
- """All assignments to names go through this function."""
+
+
+class FrameSymbolVisitor(NodeVisitor):
+ """A visitor for `Frame.inspect`."""
+
+ def __init__(self, symbols):
+ self.symbols = symbols
+
+ def visit_Name(self, node, store_as_param=False, **kwargs):
+ """All assignments to names go through this function."""
if store_as_param or node.ctx == "param":
- self.symbols.declare_parameter(node.name)
+ self.symbols.declare_parameter(node.name)
elif node.ctx == "store":
- self.symbols.store(node.name)
+ self.symbols.store(node.name)
elif node.ctx == "load":
- self.symbols.load(node.name)
-
- def visit_NSRef(self, node, **kwargs):
- self.symbols.load(node.name)
-
- def visit_If(self, node, **kwargs):
- self.visit(node.test, **kwargs)
-
- original_symbols = self.symbols
-
- def inner_visit(nodes):
- self.symbols = rv = original_symbols.copy()
- for subnode in nodes:
- self.visit(subnode, **kwargs)
- self.symbols = original_symbols
- return rv
-
- body_symbols = inner_visit(node.body)
- elif_symbols = inner_visit(node.elif_)
- else_symbols = inner_visit(node.else_ or ())
-
- self.symbols.branch_update([body_symbols, elif_symbols, else_symbols])
-
- def visit_Macro(self, node, **kwargs):
- self.symbols.store(node.name)
-
- def visit_Import(self, node, **kwargs):
- self.generic_visit(node, **kwargs)
- self.symbols.store(node.target)
-
- def visit_FromImport(self, node, **kwargs):
- self.generic_visit(node, **kwargs)
- for name in node.names:
- if isinstance(name, tuple):
- self.symbols.store(name[1])
- else:
- self.symbols.store(name)
-
- def visit_Assign(self, node, **kwargs):
- """Visit assignments in the correct order."""
- self.visit(node.node, **kwargs)
- self.visit(node.target, **kwargs)
-
- def visit_For(self, node, **kwargs):
- """Visiting stops at for blocks. However the block sequence
- is visited as part of the outer scope.
- """
- self.visit(node.iter, **kwargs)
-
- def visit_CallBlock(self, node, **kwargs):
- self.visit(node.call, **kwargs)
-
- def visit_FilterBlock(self, node, **kwargs):
- self.visit(node.filter, **kwargs)
-
- def visit_With(self, node, **kwargs):
- for target in node.values:
- self.visit(target)
-
- def visit_AssignBlock(self, node, **kwargs):
- """Stop visiting at block assigns."""
- self.visit(node.target, **kwargs)
-
- def visit_Scope(self, node, **kwargs):
- """Stop visiting at scopes."""
-
- def visit_Block(self, node, **kwargs):
- """Stop visiting at blocks."""
-
- def visit_OverlayScope(self, node, **kwargs):
- """Do not visit into overlay scopes."""
+ self.symbols.load(node.name)
+
+ def visit_NSRef(self, node, **kwargs):
+ self.symbols.load(node.name)
+
+ def visit_If(self, node, **kwargs):
+ self.visit(node.test, **kwargs)
+
+ original_symbols = self.symbols
+
+ def inner_visit(nodes):
+ self.symbols = rv = original_symbols.copy()
+ for subnode in nodes:
+ self.visit(subnode, **kwargs)
+ self.symbols = original_symbols
+ return rv
+
+ body_symbols = inner_visit(node.body)
+ elif_symbols = inner_visit(node.elif_)
+ else_symbols = inner_visit(node.else_ or ())
+
+ self.symbols.branch_update([body_symbols, elif_symbols, else_symbols])
+
+ def visit_Macro(self, node, **kwargs):
+ self.symbols.store(node.name)
+
+ def visit_Import(self, node, **kwargs):
+ self.generic_visit(node, **kwargs)
+ self.symbols.store(node.target)
+
+ def visit_FromImport(self, node, **kwargs):
+ self.generic_visit(node, **kwargs)
+ for name in node.names:
+ if isinstance(name, tuple):
+ self.symbols.store(name[1])
+ else:
+ self.symbols.store(name)
+
+ def visit_Assign(self, node, **kwargs):
+ """Visit assignments in the correct order."""
+ self.visit(node.node, **kwargs)
+ self.visit(node.target, **kwargs)
+
+ def visit_For(self, node, **kwargs):
+ """Visiting stops at for blocks. However the block sequence
+ is visited as part of the outer scope.
+ """
+ self.visit(node.iter, **kwargs)
+
+ def visit_CallBlock(self, node, **kwargs):
+ self.visit(node.call, **kwargs)
+
+ def visit_FilterBlock(self, node, **kwargs):
+ self.visit(node.filter, **kwargs)
+
+ def visit_With(self, node, **kwargs):
+ for target in node.values:
+ self.visit(target)
+
+ def visit_AssignBlock(self, node, **kwargs):
+ """Stop visiting at block assigns."""
+ self.visit(node.target, **kwargs)
+
+ def visit_Scope(self, node, **kwargs):
+ """Stop visiting at scopes."""
+
+ def visit_Block(self, node, **kwargs):
+ """Stop visiting at blocks."""
+
+ def visit_OverlayScope(self, node, **kwargs):
+ """Do not visit into overlay scopes."""
diff --git a/contrib/python/Jinja2/py2/jinja2/lexer.py b/contrib/python/Jinja2/py2/jinja2/lexer.py
index 552356a12d..075484b88b 100644
--- a/contrib/python/Jinja2/py2/jinja2/lexer.py
+++ b/contrib/python/Jinja2/py2/jinja2/lexer.py
@@ -1,26 +1,26 @@
-# -*- coding: utf-8 -*-
+# -*- coding: utf-8 -*-
"""Implements a Jinja / Python combination lexer. The ``Lexer`` class
is used to do some preprocessing. It filters out invalid operators like
the bitshift operators we don't allow in templates. It separates
template code and python code in expressions.
-"""
-import re
+"""
+import re
from ast import literal_eval
-from collections import deque
-from operator import itemgetter
-
+from collections import deque
+from operator import itemgetter
+
from ._compat import implements_iterator
from ._compat import intern
from ._compat import iteritems
from ._compat import text_type
from .exceptions import TemplateSyntaxError
from .utils import LRUCache
-
-# cache for the lexers. Exists in order to be able to have multiple
-# environments with the same lexer
-_lexer_cache = LRUCache(50)
-
-# static regular expressions
+
+# cache for the lexers. Exists in order to be able to have multiple
+# environments with the same lexer
+_lexer_cache = LRUCache(50)
+
+# static regular expressions
whitespace_re = re.compile(r"\s+", re.U)
newline_re = re.compile(r"(\r\n|\r|\n)")
string_re = re.compile(
@@ -40,22 +40,22 @@ float_re = re.compile(
""",
re.IGNORECASE | re.VERBOSE,
)
-
-try:
- # check if this Python supports Unicode identifiers
+
+try:
+ # check if this Python supports Unicode identifiers
compile("föö", "<unknown>", "eval")
-except SyntaxError:
+except SyntaxError:
# Python 2, no Unicode support, use ASCII identifiers
name_re = re.compile(r"[a-zA-Z_][a-zA-Z0-9_]*")
- check_ident = False
-else:
+ check_ident = False
+else:
# Unicode support, import generated re pattern and set flag to use
# str.isidentifier to validate during lexing.
from ._identifier import pattern as name_re
- check_ident = True
-
-# intern the tokens and keep references to them
+ check_ident = True
+
+# intern the tokens and keep references to them
TOKEN_ADD = intern("add")
TOKEN_ASSIGN = intern("assign")
TOKEN_COLON = intern("colon")
@@ -105,9 +105,9 @@ TOKEN_LINECOMMENT = intern("linecomment")
TOKEN_DATA = intern("data")
TOKEN_INITIAL = intern("initial")
TOKEN_EOF = intern("eof")
-
-# bind operators to token types
-operators = {
+
+# bind operators to token types
+operators = {
"+": TOKEN_ADD,
"-": TOKEN_SUB,
"/": TOKEN_DIV,
@@ -134,14 +134,14 @@ operators = {
"|": TOKEN_PIPE,
",": TOKEN_COMMA,
";": TOKEN_SEMICOLON,
-}
-
-reverse_operators = dict([(v, k) for k, v in iteritems(operators)])
+}
+
+reverse_operators = dict([(v, k) for k, v in iteritems(operators)])
assert len(operators) == len(reverse_operators), "operators dropped"
operator_re = re.compile(
"(%s)" % "|".join(re.escape(x) for x in sorted(operators, key=lambda x: -len(x)))
)
-
+
ignored_tokens = frozenset(
[
TOKEN_COMMENT_BEGIN,
@@ -156,12 +156,12 @@ ignored_tokens = frozenset(
ignore_if_empty = frozenset(
[TOKEN_WHITESPACE, TOKEN_DATA, TOKEN_COMMENT, TOKEN_LINECOMMENT]
)
-
-
-def _describe_token_type(token_type):
- if token_type in reverse_operators:
- return reverse_operators[token_type]
- return {
+
+
+def _describe_token_type(token_type):
+ if token_type in reverse_operators:
+ return reverse_operators[token_type]
+ return {
TOKEN_COMMENT_BEGIN: "begin of comment",
TOKEN_COMMENT_END: "end of comment",
TOKEN_COMMENT: "comment",
@@ -174,38 +174,38 @@ def _describe_token_type(token_type):
TOKEN_LINESTATEMENT_END: "end of line statement",
TOKEN_DATA: "template data / text",
TOKEN_EOF: "end of template",
- }.get(token_type, token_type)
-
-
-def describe_token(token):
- """Returns a description of the token."""
+ }.get(token_type, token_type)
+
+
+def describe_token(token):
+ """Returns a description of the token."""
if token.type == TOKEN_NAME:
- return token.value
- return _describe_token_type(token.type)
-
-
-def describe_token_expr(expr):
- """Like `describe_token` but for token expressions."""
+ return token.value
+ return _describe_token_type(token.type)
+
+
+def describe_token_expr(expr):
+ """Like `describe_token` but for token expressions."""
if ":" in expr:
type, value = expr.split(":", 1)
if type == TOKEN_NAME:
- return value
- else:
- type = expr
- return _describe_token_type(type)
-
-
-def count_newlines(value):
- """Count the number of newline characters in the string. This is
- useful for extensions that filter a stream.
- """
- return len(newline_re.findall(value))
-
-
-def compile_rules(environment):
- """Compiles all the rules from the environment into a list of rules."""
- e = re.escape
- rules = [
+ return value
+ else:
+ type = expr
+ return _describe_token_type(type)
+
+
+def count_newlines(value):
+ """Count the number of newline characters in the string. This is
+ useful for extensions that filter a stream.
+ """
+ return len(newline_re.findall(value))
+
+
+def compile_rules(environment):
+ """Compiles all the rules from the environment into a list of rules."""
+ e = re.escape
+ rules = [
(
len(environment.comment_start_string),
TOKEN_COMMENT_BEGIN,
@@ -221,9 +221,9 @@ def compile_rules(environment):
TOKEN_VARIABLE_BEGIN,
e(environment.variable_start_string),
),
- ]
-
- if environment.line_statement_prefix is not None:
+ ]
+
+ if environment.line_statement_prefix is not None:
rules.append(
(
len(environment.line_statement_prefix),
@@ -231,7 +231,7 @@ def compile_rules(environment):
r"^[ \t\v]*" + e(environment.line_statement_prefix),
)
)
- if environment.line_comment_prefix is not None:
+ if environment.line_comment_prefix is not None:
rules.append(
(
len(environment.line_comment_prefix),
@@ -239,169 +239,169 @@ def compile_rules(environment):
r"(?:^|(?<=\S))[^\S\r\n]*" + e(environment.line_comment_prefix),
)
)
-
- return [x[1:] for x in sorted(rules, reverse=True)]
-
-
-class Failure(object):
- """Class that raises a `TemplateSyntaxError` if called.
- Used by the `Lexer` to specify known errors.
- """
-
- def __init__(self, message, cls=TemplateSyntaxError):
- self.message = message
- self.error_class = cls
-
- def __call__(self, lineno, filename):
- raise self.error_class(self.message, lineno, filename)
-
-
-class Token(tuple):
- """Token class."""
-
- __slots__ = ()
- lineno, type, value = (property(itemgetter(x)) for x in range(3))
-
- def __new__(cls, lineno, type, value):
- return tuple.__new__(cls, (lineno, intern(str(type)), value))
-
- def __str__(self):
- if self.type in reverse_operators:
- return reverse_operators[self.type]
+
+ return [x[1:] for x in sorted(rules, reverse=True)]
+
+
+class Failure(object):
+ """Class that raises a `TemplateSyntaxError` if called.
+ Used by the `Lexer` to specify known errors.
+ """
+
+ def __init__(self, message, cls=TemplateSyntaxError):
+ self.message = message
+ self.error_class = cls
+
+ def __call__(self, lineno, filename):
+ raise self.error_class(self.message, lineno, filename)
+
+
+class Token(tuple):
+ """Token class."""
+
+ __slots__ = ()
+ lineno, type, value = (property(itemgetter(x)) for x in range(3))
+
+ def __new__(cls, lineno, type, value):
+ return tuple.__new__(cls, (lineno, intern(str(type)), value))
+
+ def __str__(self):
+ if self.type in reverse_operators:
+ return reverse_operators[self.type]
elif self.type == "name":
- return self.value
- return self.type
-
- def test(self, expr):
- """Test a token against a token expression. This can either be a
- token type or ``'token_type:token_value'``. This can only test
- against string values and types.
- """
- # here we do a regular string equality check as test_any is usually
- # passed an iterable of not interned strings.
- if self.type == expr:
- return True
+ return self.value
+ return self.type
+
+ def test(self, expr):
+ """Test a token against a token expression. This can either be a
+ token type or ``'token_type:token_value'``. This can only test
+ against string values and types.
+ """
+ # here we do a regular string equality check as test_any is usually
+ # passed an iterable of not interned strings.
+ if self.type == expr:
+ return True
elif ":" in expr:
return expr.split(":", 1) == [self.type, self.value]
- return False
-
- def test_any(self, *iterable):
- """Test against multiple token expressions."""
- for expr in iterable:
- if self.test(expr):
- return True
- return False
-
- def __repr__(self):
+ return False
+
+ def test_any(self, *iterable):
+ """Test against multiple token expressions."""
+ for expr in iterable:
+ if self.test(expr):
+ return True
+ return False
+
+ def __repr__(self):
return "Token(%r, %r, %r)" % (self.lineno, self.type, self.value)
-
-
-@implements_iterator
-class TokenStreamIterator(object):
- """The iterator for tokenstreams. Iterate over the stream
- until the eof token is reached.
- """
-
- def __init__(self, stream):
- self.stream = stream
-
- def __iter__(self):
- return self
-
- def __next__(self):
- token = self.stream.current
- if token.type is TOKEN_EOF:
- self.stream.close()
- raise StopIteration()
- next(self.stream)
- return token
-
-
-@implements_iterator
-class TokenStream(object):
- """A token stream is an iterable that yields :class:`Token`\\s. The
- parser however does not iterate over it but calls :meth:`next` to go
- one token ahead. The current active token is stored as :attr:`current`.
- """
-
- def __init__(self, generator, name, filename):
- self._iter = iter(generator)
- self._pushed = deque()
- self.name = name
- self.filename = filename
- self.closed = False
+
+
+@implements_iterator
+class TokenStreamIterator(object):
+ """The iterator for tokenstreams. Iterate over the stream
+ until the eof token is reached.
+ """
+
+ def __init__(self, stream):
+ self.stream = stream
+
+ def __iter__(self):
+ return self
+
+ def __next__(self):
+ token = self.stream.current
+ if token.type is TOKEN_EOF:
+ self.stream.close()
+ raise StopIteration()
+ next(self.stream)
+ return token
+
+
+@implements_iterator
+class TokenStream(object):
+ """A token stream is an iterable that yields :class:`Token`\\s. The
+ parser however does not iterate over it but calls :meth:`next` to go
+ one token ahead. The current active token is stored as :attr:`current`.
+ """
+
+ def __init__(self, generator, name, filename):
+ self._iter = iter(generator)
+ self._pushed = deque()
+ self.name = name
+ self.filename = filename
+ self.closed = False
self.current = Token(1, TOKEN_INITIAL, "")
- next(self)
-
- def __iter__(self):
- return TokenStreamIterator(self)
-
- def __bool__(self):
- return bool(self._pushed) or self.current.type is not TOKEN_EOF
-
- __nonzero__ = __bool__ # py2
-
+ next(self)
+
+ def __iter__(self):
+ return TokenStreamIterator(self)
+
+ def __bool__(self):
+ return bool(self._pushed) or self.current.type is not TOKEN_EOF
+
+ __nonzero__ = __bool__ # py2
+
@property
def eos(self):
"""Are we at the end of the stream?"""
return not self
-
- def push(self, token):
- """Push a token back to the stream."""
- self._pushed.append(token)
-
- def look(self):
- """Look at the next token."""
- old_token = next(self)
- result = self.current
- self.push(result)
- self.current = old_token
- return result
-
- def skip(self, n=1):
- """Got n tokens ahead."""
+
+ def push(self, token):
+ """Push a token back to the stream."""
+ self._pushed.append(token)
+
+ def look(self):
+ """Look at the next token."""
+ old_token = next(self)
+ result = self.current
+ self.push(result)
+ self.current = old_token
+ return result
+
+ def skip(self, n=1):
+ """Got n tokens ahead."""
for _ in range(n):
- next(self)
-
- def next_if(self, expr):
- """Perform the token test and return the token if it matched.
- Otherwise the return value is `None`.
- """
- if self.current.test(expr):
- return next(self)
-
- def skip_if(self, expr):
- """Like :meth:`next_if` but only returns `True` or `False`."""
- return self.next_if(expr) is not None
-
- def __next__(self):
- """Go one token ahead and return the old one.
-
- Use the built-in :func:`next` instead of calling this directly.
- """
- rv = self.current
- if self._pushed:
- self.current = self._pushed.popleft()
- elif self.current.type is not TOKEN_EOF:
- try:
- self.current = next(self._iter)
- except StopIteration:
- self.close()
- return rv
-
- def close(self):
- """Close the stream."""
+ next(self)
+
+ def next_if(self, expr):
+ """Perform the token test and return the token if it matched.
+ Otherwise the return value is `None`.
+ """
+ if self.current.test(expr):
+ return next(self)
+
+ def skip_if(self, expr):
+ """Like :meth:`next_if` but only returns `True` or `False`."""
+ return self.next_if(expr) is not None
+
+ def __next__(self):
+ """Go one token ahead and return the old one.
+
+ Use the built-in :func:`next` instead of calling this directly.
+ """
+ rv = self.current
+ if self._pushed:
+ self.current = self._pushed.popleft()
+ elif self.current.type is not TOKEN_EOF:
+ try:
+ self.current = next(self._iter)
+ except StopIteration:
+ self.close()
+ return rv
+
+ def close(self):
+ """Close the stream."""
self.current = Token(self.current.lineno, TOKEN_EOF, "")
- self._iter = None
- self.closed = True
-
- def expect(self, expr):
- """Expect a given token type and return it. This accepts the same
- argument as :meth:`jinja2.lexer.Token.test`.
- """
- if not self.current.test(expr):
- expr = describe_token_expr(expr)
- if self.current.type is TOKEN_EOF:
+ self._iter = None
+ self.closed = True
+
+ def expect(self, expr):
+ """Expect a given token type and return it. This accepts the same
+ argument as :meth:`jinja2.lexer.Token.test`.
+ """
+ if not self.current.test(expr):
+ expr = describe_token_expr(expr)
+ if self.current.type is TOKEN_EOF:
raise TemplateSyntaxError(
"unexpected end of template, expected %r." % expr,
self.current.lineno,
@@ -414,14 +414,14 @@ class TokenStream(object):
self.name,
self.filename,
)
- try:
- return self.current
- finally:
- next(self)
-
-
-def get_lexer(environment):
- """Return a lexer which is probably cached."""
+ try:
+ return self.current
+ finally:
+ next(self)
+
+
+def get_lexer(environment):
+ """Return a lexer which is probably cached."""
key = (
environment.block_start_string,
environment.block_end_string,
@@ -436,13 +436,13 @@ def get_lexer(environment):
environment.newline_sequence,
environment.keep_trailing_newline,
)
- lexer = _lexer_cache.get(key)
- if lexer is None:
- lexer = Lexer(environment)
- _lexer_cache[key] = lexer
- return lexer
-
-
+ lexer = _lexer_cache.get(key)
+ if lexer is None:
+ lexer = Lexer(environment)
+ _lexer_cache[key] = lexer
+ return lexer
+
+
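``get_lexer`` above backs the environment's ``lexer`` property, and ``Lexer.tokenize`` returns the ``TokenStream`` that the parser drives with ``expect``, ``skip_if`` and ``look``. A short sketch against a stock ``Environment``:

    from jinja2 import Environment

    env = Environment()
    stream = env.lexer.tokenize("{{ user.name }}")

    stream.expect("variable_begin")    # consumes "{{"
    tok = stream.expect("name")        # Token(1, 'name', 'user')
    print(tok.value)                   # user
    print(stream.skip_if("dot"))       # True -- the "." was consumed
    print(stream.look().type)          # variable_end -- peek past the current "name" token
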
class OptionalLStrip(tuple):
"""A special tuple for marking a point in the state that can have
lstrip applied.
@@ -456,53 +456,53 @@ class OptionalLStrip(tuple):
return super(OptionalLStrip, cls).__new__(cls, members)
-class Lexer(object):
- """Class that implements a lexer for a given environment. Automatically
- created by the environment class, usually you don't have to do that.
-
- Note that the lexer is not automatically bound to an environment.
- Multiple environments can share the same lexer.
- """
-
- def __init__(self, environment):
- # shortcuts
- e = re.escape
-
+class Lexer(object):
+ """Class that implements a lexer for a given environment. Automatically
+ created by the environment class, usually you don't have to do that.
+
+ Note that the lexer is not automatically bound to an environment.
+ Multiple environments can share the same lexer.
+ """
+
+ def __init__(self, environment):
+ # shortcuts
+ e = re.escape
+
def c(x):
return re.compile(x, re.M | re.S)
- # lexing rules for tags
- tag_rules = [
- (whitespace_re, TOKEN_WHITESPACE, None),
- (float_re, TOKEN_FLOAT, None),
- (integer_re, TOKEN_INTEGER, None),
- (name_re, TOKEN_NAME, None),
- (string_re, TOKEN_STRING, None),
+ # lexing rules for tags
+ tag_rules = [
+ (whitespace_re, TOKEN_WHITESPACE, None),
+ (float_re, TOKEN_FLOAT, None),
+ (integer_re, TOKEN_INTEGER, None),
+ (name_re, TOKEN_NAME, None),
+ (string_re, TOKEN_STRING, None),
(operator_re, TOKEN_OPERATOR, None),
- ]
-
- # assemble the root lexing rule. because "|" is ungreedy
- # we have to sort by length so that the lexer continues working
- # as expected when we have parsing rules like <% for block and
- # <%= for variables. (if someone wants asp like syntax)
- # variables are just part of the rules if variable processing
- # is required.
- root_tag_rules = compile_rules(environment)
-
- # block suffix if trimming is enabled
+ ]
+
+ # assemble the root lexing rule. because "|" is ungreedy
+ # we have to sort by length so that the lexer continues working
+ # as expected when we have parsing rules like <% for block and
+ # <%= for variables. (if someone wants asp like syntax)
+ # variables are just part of the rules if variable processing
+ # is required.
+ root_tag_rules = compile_rules(environment)
+
+ # block suffix if trimming is enabled
block_suffix_re = environment.trim_blocks and "\\n?" or ""
-
+
# If lstrip is enabled, it should not be applied if there is any
# non-whitespace between the newline and block.
self.lstrip_unless_re = c(r"[^ \t]") if environment.lstrip_blocks else None
-
- self.newline_sequence = environment.newline_sequence
- self.keep_trailing_newline = environment.keep_trailing_newline
-
- # global lexing rules
- self.rules = {
+
+ self.newline_sequence = environment.newline_sequence
+ self.keep_trailing_newline = environment.keep_trailing_newline
+
+ # global lexing rules
+ self.rules = {
"root": [
- # directives
+ # directives
(
c(
"(.*?)(?:%s)"
@@ -524,11 +524,11 @@ class Lexer(object):
OptionalLStrip(TOKEN_DATA, "#bygroup"),
"#bygroup",
),
- # data
+ # data
(c(".+"), TOKEN_DATA, None),
- ],
- # comments
- TOKEN_COMMENT_BEGIN: [
+ ],
+ # comments
+ TOKEN_COMMENT_BEGIN: [
(
c(
r"(.*?)((?:\-%s\s*|%s)%s)"
@@ -542,9 +542,9 @@ class Lexer(object):
"#pop",
),
(c("(.)"), (Failure("Missing end of comment tag"),), None),
- ],
- # blocks
- TOKEN_BLOCK_BEGIN: [
+ ],
+ # blocks
+ TOKEN_BLOCK_BEGIN: [
(
c(
r"(?:\-%s\s*|%s)%s"
@@ -559,8 +559,8 @@ class Lexer(object):
),
]
+ tag_rules,
- # variables
- TOKEN_VARIABLE_BEGIN: [
+ # variables
+ TOKEN_VARIABLE_BEGIN: [
(
c(
r"\-%s\s*|%s"
@@ -574,8 +574,8 @@ class Lexer(object):
)
]
+ tag_rules,
- # raw block
- TOKEN_RAW_BEGIN: [
+ # raw block
+ TOKEN_RAW_BEGIN: [
(
c(
r"(.*?)((?:%s(\-|\+|))\s*endraw\s*(?:\-%s\s*|%s%s))"
@@ -590,121 +590,121 @@ class Lexer(object):
"#pop",
),
(c("(.)"), (Failure("Missing end of raw directive"),), None),
- ],
- # line statements
- TOKEN_LINESTATEMENT_BEGIN: [
+ ],
+ # line statements
+ TOKEN_LINESTATEMENT_BEGIN: [
(c(r"\s*(\n|$)"), TOKEN_LINESTATEMENT_END, "#pop")
]
+ tag_rules,
- # line comments
- TOKEN_LINECOMMENT_BEGIN: [
+ # line comments
+ TOKEN_LINECOMMENT_BEGIN: [
(
c(r"(.*?)()(?=\n|$)"),
(TOKEN_LINECOMMENT, TOKEN_LINECOMMENT_END),
"#pop",
)
],
- }
-
- def _normalize_newlines(self, value):
- """Called for strings and template data to normalize it to unicode."""
- return newline_re.sub(self.newline_sequence, value)
-
- def tokenize(self, source, name=None, filename=None, state=None):
+ }
+
+ def _normalize_newlines(self, value):
+ """Called for strings and template data to normalize it to unicode."""
+ return newline_re.sub(self.newline_sequence, value)
+
+ def tokenize(self, source, name=None, filename=None, state=None):
"""Calls tokeniter + tokenize and wraps it in a token stream."""
- stream = self.tokeniter(source, name, filename, state)
- return TokenStream(self.wrap(stream, name, filename), name, filename)
-
- def wrap(self, stream, name=None, filename=None):
- """This is called with the stream as returned by `tokenize` and wraps
- every token in a :class:`Token` and converts the value.
- """
- for lineno, token, value in stream:
- if token in ignored_tokens:
- continue
+ stream = self.tokeniter(source, name, filename, state)
+ return TokenStream(self.wrap(stream, name, filename), name, filename)
+
+ def wrap(self, stream, name=None, filename=None):
+ """This is called with the stream as returned by `tokenize` and wraps
+ every token in a :class:`Token` and converts the value.
+ """
+ for lineno, token, value in stream:
+ if token in ignored_tokens:
+ continue
elif token == TOKEN_LINESTATEMENT_BEGIN:
token = TOKEN_BLOCK_BEGIN
elif token == TOKEN_LINESTATEMENT_END:
token = TOKEN_BLOCK_END
- # we are not interested in those tokens in the parser
+ # we are not interested in those tokens in the parser
elif token in (TOKEN_RAW_BEGIN, TOKEN_RAW_END):
- continue
+ continue
elif token == TOKEN_DATA:
- value = self._normalize_newlines(value)
+ value = self._normalize_newlines(value)
elif token == "keyword":
- token = value
+ token = value
elif token == TOKEN_NAME:
- value = str(value)
- if check_ident and not value.isidentifier():
- raise TemplateSyntaxError(
+ value = str(value)
+ if check_ident and not value.isidentifier():
+ raise TemplateSyntaxError(
"Invalid character in identifier", lineno, name, filename
)
elif token == TOKEN_STRING:
- # try to unescape string
- try:
+ # try to unescape string
+ try:
value = (
self._normalize_newlines(value[1:-1])
.encode("ascii", "backslashreplace")
.decode("unicode-escape")
)
- except Exception as e:
+ except Exception as e:
msg = str(e).split(":")[-1].strip()
- raise TemplateSyntaxError(msg, lineno, name, filename)
+ raise TemplateSyntaxError(msg, lineno, name, filename)
elif token == TOKEN_INTEGER:
value = int(value.replace("_", ""))
elif token == TOKEN_FLOAT:
# remove all "_" first to support more Python versions
value = literal_eval(value.replace("_", ""))
elif token == TOKEN_OPERATOR:
- token = operators[value]
- yield Token(lineno, token, value)
-
- def tokeniter(self, source, name, filename=None, state=None):
- """This method tokenizes the text and returns the tokens in a
- generator. Use this method if you just want to tokenize a template.
- """
- source = text_type(source)
- lines = source.splitlines()
- if self.keep_trailing_newline and source:
+ token = operators[value]
+ yield Token(lineno, token, value)
+
+ def tokeniter(self, source, name, filename=None, state=None):
+ """This method tokenizes the text and returns the tokens in a
+ generator. Use this method if you just want to tokenize a template.
+ """
+ source = text_type(source)
+ lines = source.splitlines()
+ if self.keep_trailing_newline and source:
for newline in ("\r\n", "\r", "\n"):
- if source.endswith(newline):
+ if source.endswith(newline):
lines.append("")
- break
+ break
source = "\n".join(lines)
- pos = 0
- lineno = 1
+ pos = 0
+ lineno = 1
stack = ["root"]
if state is not None and state != "root":
assert state in ("variable", "block"), "invalid state"
stack.append(state + "_begin")
- statetokens = self.rules[stack[-1]]
- source_length = len(source)
- balancing_stack = []
+ statetokens = self.rules[stack[-1]]
+ source_length = len(source)
+ balancing_stack = []
lstrip_unless_re = self.lstrip_unless_re
newlines_stripped = 0
line_starting = True
-
- while 1:
- # tokenizer loop
- for regex, tokens, new_state in statetokens:
- m = regex.match(source, pos)
- # if no match we try again with the next rule
- if m is None:
- continue
-
- # we only match blocks and variables if braces / parentheses
- # are balanced. continue parsing with the lower rule which
- # is the operator rule. do this only if the end tags look
- # like operators
+
+ while 1:
+ # tokenizer loop
+ for regex, tokens, new_state in statetokens:
+ m = regex.match(source, pos)
+ # if no match we try again with the next rule
+ if m is None:
+ continue
+
+ # we only match blocks and variables if braces / parentheses
+ # are balanced. continue parsing with the lower rule which
+ # is the operator rule. do this only if the end tags look
+ # like operators
if balancing_stack and tokens in (
TOKEN_VARIABLE_END,
TOKEN_BLOCK_END,
TOKEN_LINESTATEMENT_END,
):
- continue
-
- # tuples support more options
- if isinstance(tokens, tuple):
+ continue
+
+ # tuples support more options
+ if isinstance(tokens, tuple):
groups = m.groups()
if isinstance(tokens, OptionalLStrip):
@@ -738,37 +738,37 @@ class Lexer(object):
if not lstrip_unless_re.search(text, l_pos):
groups = (text[:l_pos],) + groups[1:]
- for idx, token in enumerate(tokens):
- # failure group
- if token.__class__ is Failure:
- raise token(lineno, filename)
- # bygroup is a bit more complex, in that case we
- # yield for the current token the first named
- # group that matched
+ for idx, token in enumerate(tokens):
+ # failure group
+ if token.__class__ is Failure:
+ raise token(lineno, filename)
+ # bygroup is a bit more complex, in that case we
+ # yield for the current token the first named
+ # group that matched
elif token == "#bygroup":
- for key, value in iteritems(m.groupdict()):
- if value is not None:
- yield lineno, key, value
+ for key, value in iteritems(m.groupdict()):
+ if value is not None:
+ yield lineno, key, value
lineno += value.count("\n")
- break
- else:
+ break
+ else:
raise RuntimeError(
"%r wanted to resolve "
"the token dynamically"
" but no group matched" % regex
)
- # normal group
- else:
+ # normal group
+ else:
data = groups[idx]
- if data or token not in ignore_if_empty:
- yield lineno, token, data
+ if data or token not in ignore_if_empty:
+ yield lineno, token, data
lineno += data.count("\n") + newlines_stripped
newlines_stripped = 0
-
- # strings as tokens are just yielded as-is.
- else:
- data = m.group()
- # update brace/parentheses balance
+
+ # strings as tokens are just yielded as-is.
+ else:
+ data = m.group()
+ # update brace/parentheses balance
if tokens == TOKEN_OPERATOR:
if data == "{":
balancing_stack.append("}")
@@ -777,12 +777,12 @@ class Lexer(object):
elif data == "[":
balancing_stack.append("]")
elif data in ("}", ")", "]"):
- if not balancing_stack:
+ if not balancing_stack:
raise TemplateSyntaxError(
"unexpected '%s'" % data, lineno, name, filename
)
- expected_op = balancing_stack.pop()
- if expected_op != data:
+ expected_op = balancing_stack.pop()
+ if expected_op != data:
raise TemplateSyntaxError(
"unexpected '%s', "
"expected '%s'" % (data, expected_op),
@@ -790,56 +790,56 @@ class Lexer(object):
name,
filename,
)
- # yield items
- if data or tokens not in ignore_if_empty:
- yield lineno, tokens, data
+ # yield items
+ if data or tokens not in ignore_if_empty:
+ yield lineno, tokens, data
lineno += data.count("\n")
-
+
line_starting = m.group()[-1:] == "\n"
- # fetch new position into new variable so that we can check
- # if there is an internal parsing error which would result
- # in an infinite loop
- pos2 = m.end()
-
- # handle state changes
- if new_state is not None:
- # remove the uppermost state
+ # fetch new position into new variable so that we can check
+ # if there is an internal parsing error which would result
+ # in an infinite loop
+ pos2 = m.end()
+
+ # handle state changes
+ if new_state is not None:
+ # remove the uppermost state
if new_state == "#pop":
- stack.pop()
- # resolve the new state by group checking
+ stack.pop()
+ # resolve the new state by group checking
elif new_state == "#bygroup":
- for key, value in iteritems(m.groupdict()):
- if value is not None:
- stack.append(key)
- break
- else:
+ for key, value in iteritems(m.groupdict()):
+ if value is not None:
+ stack.append(key)
+ break
+ else:
raise RuntimeError(
"%r wanted to resolve the "
"new state dynamically but"
" no group matched" % regex
)
- # direct state name given
- else:
- stack.append(new_state)
- statetokens = self.rules[stack[-1]]
- # we are still at the same position and no stack change.
- # this means a loop without break condition, avoid that and
- # raise error
- elif pos2 == pos:
+ # direct state name given
+ else:
+ stack.append(new_state)
+ statetokens = self.rules[stack[-1]]
+ # we are still at the same position and no stack change.
+ # this means a loop without break condition, avoid that and
+ # raise error
+ elif pos2 == pos:
raise RuntimeError(
"%r yielded empty string without stack change" % regex
)
- # publish new position and start again
- pos = pos2
- break
- # if loop terminated without break we haven't found a single match
- # either we are at the end of the file or we have a problem
- else:
- # end of text
- if pos >= source_length:
- return
- # something went wrong
+ # publish new position and start again
+ pos = pos2
+ break
+ # if loop terminated without break we haven't found a single match
+ # either we are at the end of the file or we have a problem
+ else:
+ # end of text
+ if pos >= source_length:
+ return
+ # something went wrong
raise TemplateSyntaxError(
"unexpected char %r at %d" % (source[pos], pos),
lineno,
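``tokeniter`` above yields raw ``(lineno, token, value)`` tuples, whitespace included, while ``wrap``/``tokenize`` filter and convert them into ``Token`` objects. ``Environment.lex`` exposes the raw stream; a sketch:

    from jinja2 import Environment

    env = Environment()
    for lineno, token, value in env.lex("{{ 1 + 2 }}"):
        print("%d %s %r" % (lineno, token, value))
    # Output lines look like:  1 variable_begin '{{'   1 whitespace ' '
    # 1 integer '1'   1 operator '+'   ...   1 variable_end '}}'
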
diff --git a/contrib/python/Jinja2/py2/jinja2/loaders.py b/contrib/python/Jinja2/py2/jinja2/loaders.py
index bad1774ab0..8bd29db208 100644
--- a/contrib/python/Jinja2/py2/jinja2/loaders.py
+++ b/contrib/python/Jinja2/py2/jinja2/loaders.py
@@ -1,14 +1,14 @@
-# -*- coding: utf-8 -*-
+# -*- coding: utf-8 -*-
"""API and implementations for loading templates from different data
sources.
-"""
-import os
-import sys
-import weakref
+"""
+import os
+import sys
+import weakref
from hashlib import sha1
from os import path
-from types import ModuleType
-
+from types import ModuleType
+
from ._compat import abc
from ._compat import fspath
from ._compat import iteritems
@@ -16,503 +16,503 @@ from ._compat import string_types
from .exceptions import TemplateNotFound
from .utils import internalcode
from .utils import open_if_exists
+
-
-def split_template_path(template):
- """Split a path into segments and perform a sanity check. If it detects
- '..' in the path it will raise a `TemplateNotFound` error.
- """
- pieces = []
+def split_template_path(template):
+ """Split a path into segments and perform a sanity check. If it detects
+ '..' in the path it will raise a `TemplateNotFound` error.
+ """
+ pieces = []
for piece in template.split("/"):
if (
path.sep in piece
or (path.altsep and path.altsep in piece)
or piece == path.pardir
):
- raise TemplateNotFound(template)
+ raise TemplateNotFound(template)
elif piece and piece != ".":
- pieces.append(piece)
- return pieces
-
-
-class BaseLoader(object):
- """Baseclass for all loaders. Subclass this and override `get_source` to
- implement a custom loading mechanism. The environment provides a
- `get_template` method that calls the loader's `load` method to get the
- :class:`Template` object.
-
- A very basic example for a loader that looks up templates on the file
- system could look like this::
-
- from jinja2 import BaseLoader, TemplateNotFound
- from os.path import join, exists, getmtime
-
- class MyLoader(BaseLoader):
-
- def __init__(self, path):
- self.path = path
-
- def get_source(self, environment, template):
- path = join(self.path, template)
- if not exists(path):
- raise TemplateNotFound(template)
- mtime = getmtime(path)
- with file(path) as f:
- source = f.read().decode('utf-8')
- return source, path, lambda: mtime == getmtime(path)
- """
-
- #: if set to `False` it indicates that the loader cannot provide access
- #: to the source of templates.
- #:
- #: .. versionadded:: 2.4
- has_source_access = True
-
- def get_source(self, environment, template):
- """Get the template source, filename and reload helper for a template.
- It's passed the environment and template name and has to return a
- tuple in the form ``(source, filename, uptodate)`` or raise a
- `TemplateNotFound` error if it can't locate the template.
-
- The source part of the returned tuple must be the source of the
- template as a unicode string or an ASCII bytestring. The filename should
- be the name of the file on the filesystem if it was loaded from there,
- otherwise `None`. The filename is used by python for the tracebacks
- if no loader extension is used.
-
- The last item in the tuple is the `uptodate` function. If auto
- reloading is enabled it's always called to check if the template
- changed. No arguments are passed so the function must store the
- old state somewhere (for example in a closure). If it returns `False`
- the template will be reloaded.
- """
- if not self.has_source_access:
+ pieces.append(piece)
+ return pieces
+
+
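``split_template_path`` above is the guard every path-based loader relies on: it normalises the template name and refuses parent-directory segments. A quick sketch:

    from jinja2 import TemplateNotFound
    from jinja2.loaders import split_template_path

    print(split_template_path("emails/./welcome.html"))   # ['emails', 'welcome.html']

    try:
        split_template_path("../secrets.txt")
    except TemplateNotFound:
        print("rejected")                                  # '..' segments raise TemplateNotFound
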
+class BaseLoader(object):
+ """Baseclass for all loaders. Subclass this and override `get_source` to
+ implement a custom loading mechanism. The environment provides a
+ `get_template` method that calls the loader's `load` method to get the
+ :class:`Template` object.
+
+ A very basic example for a loader that looks up templates on the file
+ system could look like this::
+
+ from jinja2 import BaseLoader, TemplateNotFound
+ from os.path import join, exists, getmtime
+
+ class MyLoader(BaseLoader):
+
+ def __init__(self, path):
+ self.path = path
+
+ def get_source(self, environment, template):
+ path = join(self.path, template)
+ if not exists(path):
+ raise TemplateNotFound(template)
+ mtime = getmtime(path)
+ with file(path) as f:
+ source = f.read().decode('utf-8')
+ return source, path, lambda: mtime == getmtime(path)
+ """
+
+ #: if set to `False` it indicates that the loader cannot provide access
+ #: to the source of templates.
+ #:
+ #: .. versionadded:: 2.4
+ has_source_access = True
+
+ def get_source(self, environment, template):
+ """Get the template source, filename and reload helper for a template.
+ It's passed the environment and template name and has to return a
+ tuple in the form ``(source, filename, uptodate)`` or raise a
+ `TemplateNotFound` error if it can't locate the template.
+
+ The source part of the returned tuple must be the source of the
+ template as a unicode string or an ASCII bytestring. The filename should
+ be the name of the file on the filesystem if it was loaded from there,
+ otherwise `None`. The filename is used by python for the tracebacks
+ if no loader extension is used.
+
+ The last item in the tuple is the `uptodate` function. If auto
+ reloading is enabled it's always called to check if the template
+ changed. No arguments are passed so the function must store the
+ old state somewhere (for example in a closure). If it returns `False`
+ the template will be reloaded.
+ """
+ if not self.has_source_access:
raise RuntimeError(
"%s cannot provide access to the source" % self.__class__.__name__
)
- raise TemplateNotFound(template)
-
- def list_templates(self):
- """Iterates over all templates. If the loader does not support that
- it should raise a :exc:`TypeError` which is the default behavior.
- """
+ raise TemplateNotFound(template)
+
+ def list_templates(self):
+ """Iterates over all templates. If the loader does not support that
+ it should raise a :exc:`TypeError` which is the default behavior.
+ """
raise TypeError("this loader cannot iterate over all templates")
-
- @internalcode
- def load(self, environment, name, globals=None):
- """Loads a template. This method looks up the template in the cache
- or loads one by calling :meth:`get_source`. Subclasses should not
- override this method as loaders working on collections of other
- loaders (such as :class:`PrefixLoader` or :class:`ChoiceLoader`)
- will not call this method but `get_source` directly.
- """
- code = None
- if globals is None:
- globals = {}
-
- # first we try to get the source for this template together
- # with the filename and the uptodate function.
- source, filename, uptodate = self.get_source(environment, name)
-
- # try to load the code from the bytecode cache if there is a
- # bytecode cache configured.
- bcc = environment.bytecode_cache
- if bcc is not None:
- bucket = bcc.get_bucket(environment, name, filename, source)
- code = bucket.code
-
- # if we don't have code so far (not cached, no longer up to
- # date) etc. we compile the template
- if code is None:
- code = environment.compile(source, name, filename)
-
- # if the bytecode cache is available and the bucket doesn't
- # have a code so far, we give the bucket the new code and put
- # it back to the bytecode cache.
- if bcc is not None and bucket.code is None:
- bucket.code = code
- bcc.set_bucket(bucket)
-
+
+ @internalcode
+ def load(self, environment, name, globals=None):
+ """Loads a template. This method looks up the template in the cache
+ or loads one by calling :meth:`get_source`. Subclasses should not
+ override this method as loaders working on collections of other
+ loaders (such as :class:`PrefixLoader` or :class:`ChoiceLoader`)
+ will not call this method but `get_source` directly.
+ """
+ code = None
+ if globals is None:
+ globals = {}
+
+ # first we try to get the source for this template together
+ # with the filename and the uptodate function.
+ source, filename, uptodate = self.get_source(environment, name)
+
+ # try to load the code from the bytecode cache if there is a
+ # bytecode cache configured.
+ bcc = environment.bytecode_cache
+ if bcc is not None:
+ bucket = bcc.get_bucket(environment, name, filename, source)
+ code = bucket.code
+
+ # if we don't have code so far (not cached, no longer up to
+ # date) etc. we compile the template
+ if code is None:
+ code = environment.compile(source, name, filename)
+
+ # if the bytecode cache is available and the bucket doesn't
+ # have a code so far, we give the bucket the new code and put
+ # it back to the bytecode cache.
+ if bcc is not None and bucket.code is None:
+ bucket.code = code
+ bcc.set_bucket(bucket)
+
return environment.template_class.from_code(
environment, code, globals, uptodate
)
-
-
-class FileSystemLoader(BaseLoader):
- """Loads templates from the file system. This loader can find templates
- in folders on the file system and is the preferred way to load them.
-
- The loader takes the path to the templates as string, or if multiple
- locations are wanted a list of them which is then looked up in the
- given order::
-
- >>> loader = FileSystemLoader('/path/to/templates')
- >>> loader = FileSystemLoader(['/path/to/templates', '/other/path'])
-
- Per default the template encoding is ``'utf-8'`` which can be changed
- by setting the `encoding` parameter to something else.
-
- To follow symbolic links, set the *followlinks* parameter to ``True``::
-
- >>> loader = FileSystemLoader('/path/to/templates', followlinks=True)
-
+
+
+class FileSystemLoader(BaseLoader):
+ """Loads templates from the file system. This loader can find templates
+ in folders on the file system and is the preferred way to load them.
+
+ The loader takes the path to the templates as string, or if multiple
+ locations are wanted a list of them which is then looked up in the
+ given order::
+
+ >>> loader = FileSystemLoader('/path/to/templates')
+ >>> loader = FileSystemLoader(['/path/to/templates', '/other/path'])
+
+ Per default the template encoding is ``'utf-8'`` which can be changed
+ by setting the `encoding` parameter to something else.
+
+ To follow symbolic links, set the *followlinks* parameter to ``True``::
+
+ >>> loader = FileSystemLoader('/path/to/templates', followlinks=True)
+
.. versionchanged:: 2.8
The ``followlinks`` parameter was added.
- """
-
+ """
+
def __init__(self, searchpath, encoding="utf-8", followlinks=False):
if not isinstance(searchpath, abc.Iterable) or isinstance(
searchpath, string_types
):
- searchpath = [searchpath]
+ searchpath = [searchpath]
# In Python 3.5, os.path.join doesn't support Path. This can be
# simplified to list(searchpath) when Python 3.5 is dropped.
self.searchpath = [fspath(p) for p in searchpath]
- self.encoding = encoding
- self.followlinks = followlinks
-
- def get_source(self, environment, template):
- pieces = split_template_path(template)
- for searchpath in self.searchpath:
- filename = path.join(searchpath, *pieces)
- f = open_if_exists(filename)
- if f is None:
- continue
- try:
- contents = f.read().decode(self.encoding)
- finally:
- f.close()
-
- mtime = path.getmtime(filename)
-
- def uptodate():
- try:
- return path.getmtime(filename) == mtime
- except OSError:
- return False
-
- return contents, filename, uptodate
- raise TemplateNotFound(template)
-
- def list_templates(self):
- found = set()
- for searchpath in self.searchpath:
- walk_dir = os.walk(searchpath, followlinks=self.followlinks)
+ self.encoding = encoding
+ self.followlinks = followlinks
+
+ def get_source(self, environment, template):
+ pieces = split_template_path(template)
+ for searchpath in self.searchpath:
+ filename = path.join(searchpath, *pieces)
+ f = open_if_exists(filename)
+ if f is None:
+ continue
+ try:
+ contents = f.read().decode(self.encoding)
+ finally:
+ f.close()
+
+ mtime = path.getmtime(filename)
+
+ def uptodate():
+ try:
+ return path.getmtime(filename) == mtime
+ except OSError:
+ return False
+
+ return contents, filename, uptodate
+ raise TemplateNotFound(template)
+
+ def list_templates(self):
+ found = set()
+ for searchpath in self.searchpath:
+ walk_dir = os.walk(searchpath, followlinks=self.followlinks)
for dirpath, _, filenames in walk_dir:
- for filename in filenames:
+ for filename in filenames:
template = (
os.path.join(dirpath, filename)[len(searchpath) :]
.strip(os.path.sep)
.replace(os.path.sep, "/")
)
if template[:2] == "./":
- template = template[2:]
- if template not in found:
- found.add(template)
- return sorted(found)
-
-
-class PackageLoader(BaseLoader):
- """Load templates from python eggs or packages. It is constructed with
- the name of the python package and the path to the templates in that
- package::
-
- loader = PackageLoader('mypackage', 'views')
-
- If the package path is not given, ``'templates'`` is assumed.
-
- Per default the template encoding is ``'utf-8'`` which can be changed
- by setting the `encoding` parameter to something else. Due to the nature
- of eggs it's only possible to reload templates if the package was loaded
- from the file system and not a zip file.
- """
-
+ template = template[2:]
+ if template not in found:
+ found.add(template)
+ return sorted(found)
+
+
+class PackageLoader(BaseLoader):
+ """Load templates from python eggs or packages. It is constructed with
+ the name of the python package and the path to the templates in that
+ package::
+
+ loader = PackageLoader('mypackage', 'views')
+
+ If the package path is not given, ``'templates'`` is assumed.
+
+ Per default the template encoding is ``'utf-8'`` which can be changed
+ by setting the `encoding` parameter to something else. Due to the nature
+ of eggs it's only possible to reload templates if the package was loaded
+ from the file system and not a zip file.
+ """
+
def __init__(self, package_name, package_path="templates", encoding="utf-8"):
from pkg_resources import DefaultProvider
from pkg_resources import get_provider
from pkg_resources import ResourceManager
provider = get_provider(package_name)
- self.encoding = encoding
- self.manager = ResourceManager()
- self.filesystem_bound = isinstance(provider, DefaultProvider)
- self.provider = provider
- self.package_path = package_path
-
- def get_source(self, environment, template):
- pieces = split_template_path(template)
+ self.encoding = encoding
+ self.manager = ResourceManager()
+ self.filesystem_bound = isinstance(provider, DefaultProvider)
+ self.provider = provider
+ self.package_path = package_path
+
+ def get_source(self, environment, template):
+ pieces = split_template_path(template)
p = "/".join((self.package_path,) + tuple(pieces))
- if not self.provider.has_resource(p):
- raise TemplateNotFound(template)
-
- filename = uptodate = None
-
- if self.filesystem_bound:
- filename = self.provider.get_resource_filename(self.manager, p)
- mtime = path.getmtime(filename)
-
- def uptodate():
- try:
- return path.getmtime(filename) == mtime
- except OSError:
- return False
-
- source = self.provider.get_resource_string(self.manager, p)
- return source.decode(self.encoding), filename, uptodate
-
- def list_templates(self):
- path = self.package_path
+ if not self.provider.has_resource(p):
+ raise TemplateNotFound(template)
+
+ filename = uptodate = None
+
+ if self.filesystem_bound:
+ filename = self.provider.get_resource_filename(self.manager, p)
+ mtime = path.getmtime(filename)
+
+ def uptodate():
+ try:
+ return path.getmtime(filename) == mtime
+ except OSError:
+ return False
+
+ source = self.provider.get_resource_string(self.manager, p)
+ return source.decode(self.encoding), filename, uptodate
+
+ def list_templates(self):
+ path = self.package_path
if path[:2] == "./":
- path = path[2:]
+ path = path[2:]
elif path == ".":
path = ""
- offset = len(path)
- results = []
+ offset = len(path)
+ results = []
- def _walk(path):
- for filename in self.provider.resource_listdir(path):
+ def _walk(path):
+ for filename in self.provider.resource_listdir(path):
fullname = path + "/" + filename
- if self.provider.resource_isdir(fullname):
- _walk(fullname)
- else:
+ if self.provider.resource_isdir(fullname):
+ _walk(fullname)
+ else:
results.append(fullname[offset:].lstrip("/"))
- _walk(path)
- results.sort()
- return results
-
-
-class DictLoader(BaseLoader):
- """Loads a template from a python dict. It's passed a dict of unicode
- strings bound to template names. This loader is useful for unittesting:
-
- >>> loader = DictLoader({'index.html': 'source here'})
-
- Because auto reloading is rarely useful this is disabled per default.
- """
-
- def __init__(self, mapping):
- self.mapping = mapping
-
- def get_source(self, environment, template):
- if template in self.mapping:
- source = self.mapping[template]
- return source, None, lambda: source == self.mapping.get(template)
- raise TemplateNotFound(template)
-
- def list_templates(self):
- return sorted(self.mapping)
-
-
-class FunctionLoader(BaseLoader):
- """A loader that is passed a function which does the loading. The
- function receives the name of the template and has to return either
- a unicode string with the template source, a tuple in the form ``(source,
- filename, uptodatefunc)`` or `None` if the template does not exist.
-
- >>> def load_template(name):
- ... if name == 'index.html':
- ... return '...'
- ...
- >>> loader = FunctionLoader(load_template)
-
- The `uptodatefunc` is a function that is called if autoreload is enabled
- and has to return `True` if the template is still up to date. For more
- details have a look at :meth:`BaseLoader.get_source` which has the same
- return value.
- """
-
- def __init__(self, load_func):
- self.load_func = load_func
-
- def get_source(self, environment, template):
- rv = self.load_func(template)
- if rv is None:
- raise TemplateNotFound(template)
- elif isinstance(rv, string_types):
- return rv, None, None
- return rv
-
-
-class PrefixLoader(BaseLoader):
- """A loader that is passed a dict of loaders where each loader is bound
- to a prefix. The prefix is delimited from the template by a slash per
- default, which can be changed by setting the `delimiter` argument to
- something else::
-
- loader = PrefixLoader({
- 'app1': PackageLoader('mypackage.app1'),
- 'app2': PackageLoader('mypackage.app2')
- })
-
- By loading ``'app1/index.html'`` the file from the app1 package is loaded,
- by loading ``'app2/index.html'`` the file from the second.
- """
-
+ _walk(path)
+ results.sort()
+ return results
+
+
+class DictLoader(BaseLoader):
+ """Loads a template from a python dict. It's passed a dict of unicode
+ strings bound to template names. This loader is useful for unittesting:
+
+ >>> loader = DictLoader({'index.html': 'source here'})
+
+ Because auto reloading is rarely useful this is disabled per default.
+ """
+
+ def __init__(self, mapping):
+ self.mapping = mapping
+
+ def get_source(self, environment, template):
+ if template in self.mapping:
+ source = self.mapping[template]
+ return source, None, lambda: source == self.mapping.get(template)
+ raise TemplateNotFound(template)
+
+ def list_templates(self):
+ return sorted(self.mapping)
+
+
+class FunctionLoader(BaseLoader):
+ """A loader that is passed a function which does the loading. The
+ function receives the name of the template and has to return either
+ a unicode string with the template source, a tuple in the form ``(source,
+ filename, uptodatefunc)`` or `None` if the template does not exist.
+
+ >>> def load_template(name):
+ ... if name == 'index.html':
+ ... return '...'
+ ...
+ >>> loader = FunctionLoader(load_template)
+
+ The `uptodatefunc` is a function that is called if autoreload is enabled
+ and has to return `True` if the template is still up to date. For more
+ details have a look at :meth:`BaseLoader.get_source` which has the same
+ return value.
+ """
+
+ def __init__(self, load_func):
+ self.load_func = load_func
+
+ def get_source(self, environment, template):
+ rv = self.load_func(template)
+ if rv is None:
+ raise TemplateNotFound(template)
+ elif isinstance(rv, string_types):
+ return rv, None, None
+ return rv
+
+
+class PrefixLoader(BaseLoader):
+ """A loader that is passed a dict of loaders where each loader is bound
+ to a prefix. The prefix is delimited from the template by a slash per
+ default, which can be changed by setting the `delimiter` argument to
+ something else::
+
+ loader = PrefixLoader({
+ 'app1': PackageLoader('mypackage.app1'),
+ 'app2': PackageLoader('mypackage.app2')
+ })
+
+ By loading ``'app1/index.html'`` the file from the app1 package is loaded,
+ by loading ``'app2/index.html'`` the file from the second.
+ """
+
def __init__(self, mapping, delimiter="/"):
- self.mapping = mapping
- self.delimiter = delimiter
-
- def get_loader(self, template):
- try:
- prefix, name = template.split(self.delimiter, 1)
- loader = self.mapping[prefix]
- except (ValueError, KeyError):
- raise TemplateNotFound(template)
- return loader, name
-
- def get_source(self, environment, template):
- loader, name = self.get_loader(template)
- try:
- return loader.get_source(environment, name)
- except TemplateNotFound:
- # re-raise the exception with the correct filename here.
- # (the one that includes the prefix)
- raise TemplateNotFound(template)
-
- @internalcode
- def load(self, environment, name, globals=None):
- loader, local_name = self.get_loader(name)
- try:
- return loader.load(environment, local_name, globals)
- except TemplateNotFound:
- # re-raise the exception with the correct filename here.
- # (the one that includes the prefix)
- raise TemplateNotFound(name)
-
- def list_templates(self):
- result = []
- for prefix, loader in iteritems(self.mapping):
- for template in loader.list_templates():
- result.append(prefix + self.delimiter + template)
- return result
-
-
-class ChoiceLoader(BaseLoader):
- """This loader works like the `PrefixLoader` except that no prefix is
- specified. If a template could not be found by one loader the next one
- is tried.
-
- >>> loader = ChoiceLoader([
- ... FileSystemLoader('/path/to/user/templates'),
- ... FileSystemLoader('/path/to/system/templates')
- ... ])
-
- This is useful if you want to allow users to override builtin templates
- from a different location.
- """
-
- def __init__(self, loaders):
- self.loaders = loaders
-
- def get_source(self, environment, template):
- for loader in self.loaders:
- try:
- return loader.get_source(environment, template)
- except TemplateNotFound:
- pass
- raise TemplateNotFound(template)
-
- @internalcode
- def load(self, environment, name, globals=None):
- for loader in self.loaders:
- try:
- return loader.load(environment, name, globals)
- except TemplateNotFound:
- pass
- raise TemplateNotFound(name)
-
- def list_templates(self):
- found = set()
- for loader in self.loaders:
- found.update(loader.list_templates())
- return sorted(found)
-
-
-class _TemplateModule(ModuleType):
- """Like a normal module but with support for weak references"""
-
-
-class ModuleLoader(BaseLoader):
- """This loader loads templates from precompiled templates.
-
- Example usage:
-
- >>> loader = ChoiceLoader([
- ... ModuleLoader('/path/to/compiled/templates'),
- ... FileSystemLoader('/path/to/templates')
- ... ])
-
- Templates can be precompiled with :meth:`Environment.compile_templates`.
- """
-
- has_source_access = False
-
- def __init__(self, path):
+ self.mapping = mapping
+ self.delimiter = delimiter
+
+ def get_loader(self, template):
+ try:
+ prefix, name = template.split(self.delimiter, 1)
+ loader = self.mapping[prefix]
+ except (ValueError, KeyError):
+ raise TemplateNotFound(template)
+ return loader, name
+
+ def get_source(self, environment, template):
+ loader, name = self.get_loader(template)
+ try:
+ return loader.get_source(environment, name)
+ except TemplateNotFound:
+ # re-raise the exception with the correct filename here.
+ # (the one that includes the prefix)
+ raise TemplateNotFound(template)
+
+ @internalcode
+ def load(self, environment, name, globals=None):
+ loader, local_name = self.get_loader(name)
+ try:
+ return loader.load(environment, local_name, globals)
+ except TemplateNotFound:
+ # re-raise the exception with the correct filename here.
+ # (the one that includes the prefix)
+ raise TemplateNotFound(name)
+
+ def list_templates(self):
+ result = []
+ for prefix, loader in iteritems(self.mapping):
+ for template in loader.list_templates():
+ result.append(prefix + self.delimiter + template)
+ return result
+
+
+class ChoiceLoader(BaseLoader):
+ """This loader works like the `PrefixLoader` except that no prefix is
+ specified. If a template could not be found by one loader the next one
+ is tried.
+
+ >>> loader = ChoiceLoader([
+ ... FileSystemLoader('/path/to/user/templates'),
+ ... FileSystemLoader('/path/to/system/templates')
+ ... ])
+
+ This is useful if you want to allow users to override builtin templates
+ from a different location.
+ """
+
+ def __init__(self, loaders):
+ self.loaders = loaders
+
+ def get_source(self, environment, template):
+ for loader in self.loaders:
+ try:
+ return loader.get_source(environment, template)
+ except TemplateNotFound:
+ pass
+ raise TemplateNotFound(template)
+
+ @internalcode
+ def load(self, environment, name, globals=None):
+ for loader in self.loaders:
+ try:
+ return loader.load(environment, name, globals)
+ except TemplateNotFound:
+ pass
+ raise TemplateNotFound(name)
+
+ def list_templates(self):
+ found = set()
+ for loader in self.loaders:
+ found.update(loader.list_templates())
+ return sorted(found)
+
+
+class _TemplateModule(ModuleType):
+ """Like a normal module but with support for weak references"""
+
+
+class ModuleLoader(BaseLoader):
+ """This loader loads templates from precompiled templates.
+
+ Example usage:
+
+ >>> loader = ChoiceLoader([
+ ... ModuleLoader('/path/to/compiled/templates'),
+ ... FileSystemLoader('/path/to/templates')
+ ... ])
+
+ Templates can be precompiled with :meth:`Environment.compile_templates`.
+ """
+
+ has_source_access = False
+
+ def __init__(self, path):
package_name = "_jinja2_module_templates_%x" % id(self)
-
- # create a fake module that looks for the templates in the
- # path given.
- mod = _TemplateModule(package_name)
+
+ # create a fake module that looks for the templates in the
+ # path given.
+ mod = _TemplateModule(package_name)
if not isinstance(path, abc.Iterable) or isinstance(path, string_types):
- path = [path]
-
+ path = [path]
+
mod.__path__ = [fspath(p) for p in path]
-
+
sys.modules[package_name] = weakref.proxy(
mod, lambda x: sys.modules.pop(package_name, None)
)
- # the only strong reference, the sys.modules entry is weak
- # so that the garbage collector can remove it once the
- # loader that created it goes out of business.
- self.module = mod
- self.package_name = package_name
-
- @staticmethod
- def get_template_key(name):
+ # the only strong reference, the sys.modules entry is weak
+ # so that the garbage collector can remove it once the
+ # loader that created it goes out of business.
+ self.module = mod
+ self.package_name = package_name
+
+ @staticmethod
+ def get_template_key(name):
return "tmpl_" + sha1(name.encode("utf-8")).hexdigest()
-
- @staticmethod
- def get_module_filename(name):
+
+ @staticmethod
+ def get_module_filename(name):
return ModuleLoader.get_template_key(name) + ".py"
-
- @internalcode
- def load(self, environment, name, globals=None):
- key = self.get_template_key(name)
+
+ @internalcode
+ def load(self, environment, name, globals=None):
+ key = self.get_template_key(name)
module = "%s.%s" % (self.package_name, key)
- mod = getattr(self.module, module, None)
- if mod is None:
- try:
+ mod = getattr(self.module, module, None)
+ if mod is None:
+ try:
mod = __import__(module, None, None, ["root"])
- except ImportError:
- raise TemplateNotFound(name)
-
- # remove the entry from sys.modules, we only want the attribute
- # on the module object we have stored on the loader.
- sys.modules.pop(module, None)
-
- return environment.template_class.from_module_dict(
+ except ImportError:
+ raise TemplateNotFound(name)
+
+ # remove the entry from sys.modules, we only want the attribute
+ # on the module object we have stored on the loader.
+ sys.modules.pop(module, None)
+
+ return environment.template_class.from_module_dict(
environment, mod.__dict__, globals
)
-
-
-class ResourceLoader(BaseLoader):
- def __init__(self, prefix, module_loader):
- self.prefix = prefix
- self.module_loader = module_loader
-
- def get_source(self, environment, template):
- if self.module_loader is None:
- raise TemplateNotFound(template)
- try:
+
+
+class ResourceLoader(BaseLoader):
+ def __init__(self, prefix, module_loader):
+ self.prefix = prefix
+ self.module_loader = module_loader
+
+ def get_source(self, environment, template):
+ if self.module_loader is None:
+ raise TemplateNotFound(template)
+ try:
return self.module_loader.get_data(os.path.join(self.prefix, template)).decode('utf-8'), None, None
- except IOError:
+ except IOError:
raise TemplateNotFound(template)
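The loaders above are usually combined rather than used on their own. A minimal sketch, assuming only the public jinja2 API vendored in this tree (paths and template names are illustrative, not taken from this change):

from jinja2 import ChoiceLoader, DictLoader, Environment, FileSystemLoader, PrefixLoader

# user templates override the bundled ones under the "site" prefix,
# while an in-memory DictLoader serves templates under "tests"
loader = PrefixLoader({
    "site": ChoiceLoader([
        FileSystemLoader("/path/to/user/templates"),
        FileSystemLoader("/path/to/system/templates"),
    ]),
    "tests": DictLoader({"index.html": "Hello {{ name }}!"}),
})
env = Environment(loader=loader)
print(env.get_template("tests/index.html").render(name="World"))  # Hello World!
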
diff --git a/contrib/python/Jinja2/py2/jinja2/meta.py b/contrib/python/Jinja2/py2/jinja2/meta.py
index 3795aace59..17144c65fa 100644
--- a/contrib/python/Jinja2/py2/jinja2/meta.py
+++ b/contrib/python/Jinja2/py2/jinja2/meta.py
@@ -1,101 +1,101 @@
-# -*- coding: utf-8 -*-
+# -*- coding: utf-8 -*-
"""Functions that expose information about templates that might be
interesting for introspection.
-"""
+"""
from . import nodes
from ._compat import iteritems
from ._compat import string_types
from .compiler import CodeGenerator
-
-
-class TrackingCodeGenerator(CodeGenerator):
- """We abuse the code generator for introspection."""
-
- def __init__(self, environment):
+
+
+class TrackingCodeGenerator(CodeGenerator):
+ """We abuse the code generator for introspection."""
+
+ def __init__(self, environment):
CodeGenerator.__init__(self, environment, "<introspection>", "<introspection>")
- self.undeclared_identifiers = set()
-
- def write(self, x):
- """Don't write."""
-
- def enter_frame(self, frame):
- """Remember all undeclared identifiers."""
- CodeGenerator.enter_frame(self, frame)
- for _, (action, param) in iteritems(frame.symbols.loads):
+ self.undeclared_identifiers = set()
+
+ def write(self, x):
+ """Don't write."""
+
+ def enter_frame(self, frame):
+ """Remember all undeclared identifiers."""
+ CodeGenerator.enter_frame(self, frame)
+ for _, (action, param) in iteritems(frame.symbols.loads):
if action == "resolve" and param not in self.environment.globals:
- self.undeclared_identifiers.add(param)
-
-
-def find_undeclared_variables(ast):
- """Returns a set of all variables in the AST that will be looked up from
- the context at runtime. Because at compile time it's not known which
- variables will be used depending on the path the execution takes at
- runtime, all variables are returned.
-
- >>> from jinja2 import Environment, meta
- >>> env = Environment()
- >>> ast = env.parse('{% set foo = 42 %}{{ bar + foo }}')
- >>> meta.find_undeclared_variables(ast) == set(['bar'])
- True
-
- .. admonition:: Implementation
-
- Internally the code generator is used for finding undeclared variables.
- This is good to know because the code generator might raise a
- :exc:`TemplateAssertionError` during compilation and as a matter of
- fact this function can currently raise that exception as well.
- """
- codegen = TrackingCodeGenerator(ast.environment)
- codegen.visit(ast)
- return codegen.undeclared_identifiers
-
-
-def find_referenced_templates(ast):
- """Finds all the referenced templates from the AST. This will return an
- iterator over all the hardcoded template extensions, inclusions and
- imports. If dynamic inheritance or inclusion is used, `None` will be
- yielded.
-
- >>> from jinja2 import Environment, meta
- >>> env = Environment()
- >>> ast = env.parse('{% extends "layout.html" %}{% include helper %}')
- >>> list(meta.find_referenced_templates(ast))
- ['layout.html', None]
-
- This function is useful for dependency tracking. For example if you want
- to rebuild parts of the website after a layout template has changed.
- """
+ self.undeclared_identifiers.add(param)
+
+
+def find_undeclared_variables(ast):
+ """Returns a set of all variables in the AST that will be looked up from
+ the context at runtime. Because at compile time it's not known which
+ variables will be used depending on the path the execution takes at
+ runtime, all variables are returned.
+
+ >>> from jinja2 import Environment, meta
+ >>> env = Environment()
+ >>> ast = env.parse('{% set foo = 42 %}{{ bar + foo }}')
+ >>> meta.find_undeclared_variables(ast) == set(['bar'])
+ True
+
+ .. admonition:: Implementation
+
+ Internally the code generator is used for finding undeclared variables.
+ This is good to know because the code generator might raise a
+ :exc:`TemplateAssertionError` during compilation and as a matter of
+ fact this function can currently raise that exception as well.
+ """
+ codegen = TrackingCodeGenerator(ast.environment)
+ codegen.visit(ast)
+ return codegen.undeclared_identifiers
+
+
+def find_referenced_templates(ast):
+ """Finds all the referenced templates from the AST. This will return an
+ iterator over all the hardcoded template extensions, inclusions and
+ imports. If dynamic inheritance or inclusion is used, `None` will be
+ yielded.
+
+ >>> from jinja2 import Environment, meta
+ >>> env = Environment()
+ >>> ast = env.parse('{% extends "layout.html" %}{% include helper %}')
+ >>> list(meta.find_referenced_templates(ast))
+ ['layout.html', None]
+
+ This function is useful for dependency tracking. For example if you want
+ to rebuild parts of the website after a layout template has changed.
+ """
for node in ast.find_all(
(nodes.Extends, nodes.FromImport, nodes.Import, nodes.Include)
):
- if not isinstance(node.template, nodes.Const):
- # a tuple with some non consts in there
- if isinstance(node.template, (nodes.Tuple, nodes.List)):
- for template_name in node.template.items:
- # something const, only yield the strings and ignore
- # non-string consts that really just make no sense
- if isinstance(template_name, nodes.Const):
- if isinstance(template_name.value, string_types):
- yield template_name.value
- # something dynamic in there
- else:
- yield None
- # something dynamic we don't know about here
- else:
- yield None
- continue
- # constant is a basestring, direct template name
- if isinstance(node.template.value, string_types):
- yield node.template.value
- # a tuple or list (latter *should* not happen) made of consts,
- # yield the consts that are strings. We could warn here for
- # non string values
+ if not isinstance(node.template, nodes.Const):
+ # a tuple with some non consts in there
+ if isinstance(node.template, (nodes.Tuple, nodes.List)):
+ for template_name in node.template.items:
+ # something const, only yield the strings and ignore
+ # non-string consts that really just make no sense
+ if isinstance(template_name, nodes.Const):
+ if isinstance(template_name.value, string_types):
+ yield template_name.value
+ # something dynamic in there
+ else:
+ yield None
+ # something dynamic we don't know about here
+ else:
+ yield None
+ continue
+ # constant is a basestring, direct template name
+ if isinstance(node.template.value, string_types):
+ yield node.template.value
+ # a tuple or list (latter *should* not happen) made of consts,
+ # yield the consts that are strings. We could warn here for
+ # non string values
elif isinstance(node, nodes.Include) and isinstance(
node.template.value, (tuple, list)
):
- for template_name in node.template.value:
- if isinstance(template_name, string_types):
- yield template_name
- # something else we don't care about, we could warn here
- else:
- yield None
+ for template_name in node.template.value:
+ if isinstance(template_name, string_types):
+ yield template_name
+ # something else we don't care about, we could warn here
+ else:
+ yield None
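Both helpers above fit the same introspection workflow: parse a template once, then ask which context variables it needs and which other templates it references. A short sketch under that assumption (the template source is illustrative):

from jinja2 import Environment, meta

env = Environment()
ast = env.parse('{% extends "layout.html" %}{{ user.name | upper }}')
print(meta.find_undeclared_variables(ast))        # set containing 'user'
print(list(meta.find_referenced_templates(ast)))  # ['layout.html']
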
diff --git a/contrib/python/Jinja2/py2/jinja2/nativetypes.py b/contrib/python/Jinja2/py2/jinja2/nativetypes.py
index 9f0d39cd98..1ce92ad4c4 100644
--- a/contrib/python/Jinja2/py2/jinja2/nativetypes.py
+++ b/contrib/python/Jinja2/py2/jinja2/nativetypes.py
@@ -1,16 +1,16 @@
-from ast import literal_eval
+from ast import literal_eval
from itertools import chain
from itertools import islice
-
+
from . import nodes
from ._compat import text_type
from .compiler import CodeGenerator
from .compiler import has_safe_repr
from .environment import Environment
from .environment import Template
+
-
-def native_concat(nodes):
+def native_concat(nodes):
"""Return a native Python type from the list of compiled nodes. If
the result is a single node, its value is returned. Otherwise, the
nodes are concatenated as strings. If the result can be parsed with
@@ -18,77 +18,77 @@ def native_concat(nodes):
the string is returned.
:param nodes: Iterable of nodes to concatenate.
- """
- head = list(islice(nodes, 2))
-
- if not head:
- return None
-
- if len(head) == 1:
+ """
+ head = list(islice(nodes, 2))
+
+ if not head:
+ return None
+
+ if len(head) == 1:
raw = head[0]
- else:
+ else:
raw = u"".join([text_type(v) for v in chain(head, nodes)])
-
- try:
+
+ try:
return literal_eval(raw)
- except (ValueError, SyntaxError, MemoryError):
+ except (ValueError, SyntaxError, MemoryError):
return raw
-
-
-class NativeCodeGenerator(CodeGenerator):
+
+
+class NativeCodeGenerator(CodeGenerator):
"""A code generator which renders Python types by not adding
``to_string()`` around output nodes.
- """
-
+ """
+
@staticmethod
def _default_finalize(value):
return value
-
+
def _output_const_repr(self, group):
return repr(u"".join([text_type(v) for v in group]))
-
+
def _output_child_to_const(self, node, frame, finalize):
const = node.as_const(frame.eval_ctx)
-
+
if not has_safe_repr(const):
raise nodes.Impossible()
-
+
if isinstance(node, nodes.TemplateData):
return const
-
+
return finalize.const(const)
-
+
def _output_child_pre(self, node, frame, finalize):
if finalize.src is not None:
self.write(finalize.src)
-
+
def _output_child_post(self, node, frame, finalize):
if finalize.src is not None:
self.write(")")
-
-
+
+
class NativeEnvironment(Environment):
"""An environment that renders templates to native Python types."""
-
+
code_generator_class = NativeCodeGenerator
-
-
+
+
class NativeTemplate(Template):
environment_class = NativeEnvironment
-
- def render(self, *args, **kwargs):
+
+ def render(self, *args, **kwargs):
"""Render the template to produce a native Python type. If the
result is a single node, its value is returned. Otherwise, the
nodes are concatenated as strings. If the result can be parsed
with :func:`ast.literal_eval`, the parsed value is returned.
Otherwise, the string is returned.
- """
- vars = dict(*args, **kwargs)
-
- try:
- return native_concat(self.root_render_func(self.new_context(vars)))
- except Exception:
+ """
+ vars = dict(*args, **kwargs)
+
+ try:
+ return native_concat(self.root_render_func(self.new_context(vars)))
+ except Exception:
return self.environment.handle_exception()
-
-
+
+
NativeEnvironment.template_class = NativeTemplate # type: ignore
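The practical difference from the default environment is the type returned by render(). A brief sketch, assuming Jinja2 2.11 as vendored here:

from jinja2 import Environment
from jinja2.nativetypes import NativeEnvironment

as_text = Environment().from_string("{{ 2 + 2 }}").render()          # '4', a string
as_native = NativeEnvironment().from_string("{{ 2 + 2 }}").render()  # 4, an int
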
diff --git a/contrib/python/Jinja2/py2/jinja2/nodes.py b/contrib/python/Jinja2/py2/jinja2/nodes.py
index 95bd614a14..6466f7bb6d 100644
--- a/contrib/python/Jinja2/py2/jinja2/nodes.py
+++ b/contrib/python/Jinja2/py2/jinja2/nodes.py
@@ -1,19 +1,19 @@
-# -*- coding: utf-8 -*-
+# -*- coding: utf-8 -*-
"""AST nodes generated by the parser for the compiler. Also provides
some node tree helper functions used by the parser and compiler in order
to normalize nodes.
-"""
-import operator
-from collections import deque
-
+"""
+import operator
+from collections import deque
+
from markupsafe import Markup
-
+
from ._compat import izip
from ._compat import PY2
from ._compat import text_type
from ._compat import with_metaclass
-
-_binop_to_func = {
+
+_binop_to_func = {
"*": operator.mul,
"/": operator.truediv,
"//": operator.floordiv,
@@ -21,11 +21,11 @@ _binop_to_func = {
"%": operator.mod,
"+": operator.add,
"-": operator.sub,
-}
-
+}
+
_uaop_to_func = {"not": operator.not_, "+": operator.pos, "-": operator.neg}
-
-_cmpop_to_func = {
+
+_cmpop_to_func = {
"eq": operator.eq,
"ne": operator.ne,
"gt": operator.gt,
@@ -34,90 +34,90 @@ _cmpop_to_func = {
"lteq": operator.le,
"in": lambda a, b: a in b,
"notin": lambda a, b: a not in b,
-}
-
-
-class Impossible(Exception):
- """Raised if the node could not perform a requested action."""
-
-
-class NodeType(type):
- """A metaclass for nodes that handles the field and attribute
- inheritance. Fields and attributes from the parent class are
- automatically forwarded to the child."""
-
+}
+
+
+class Impossible(Exception):
+ """Raised if the node could not perform a requested action."""
+
+
+class NodeType(type):
+ """A metaclass for nodes that handles the field and attribute
+ inheritance. Fields and attributes from the parent class are
+ automatically forwarded to the child."""
+
def __new__(mcs, name, bases, d):
for attr in "fields", "attributes":
- storage = []
- storage.extend(getattr(bases[0], attr, ()))
- storage.extend(d.get(attr, ()))
+ storage = []
+ storage.extend(getattr(bases[0], attr, ()))
+ storage.extend(d.get(attr, ()))
assert len(bases) == 1, "multiple inheritance not allowed"
assert len(storage) == len(set(storage)), "layout conflict"
- d[attr] = tuple(storage)
+ d[attr] = tuple(storage)
d.setdefault("abstract", False)
return type.__new__(mcs, name, bases, d)
-
-
-class EvalContext(object):
- """Holds evaluation time information. Custom attributes can be attached
- to it in extensions.
- """
-
- def __init__(self, environment, template_name=None):
- self.environment = environment
- if callable(environment.autoescape):
- self.autoescape = environment.autoescape(template_name)
- else:
- self.autoescape = environment.autoescape
- self.volatile = False
-
- def save(self):
- return self.__dict__.copy()
-
- def revert(self, old):
- self.__dict__.clear()
- self.__dict__.update(old)
-
-
-def get_eval_context(node, ctx):
- if ctx is None:
- if node.environment is None:
+
+
+class EvalContext(object):
+ """Holds evaluation time information. Custom attributes can be attached
+ to it in extensions.
+ """
+
+ def __init__(self, environment, template_name=None):
+ self.environment = environment
+ if callable(environment.autoescape):
+ self.autoescape = environment.autoescape(template_name)
+ else:
+ self.autoescape = environment.autoescape
+ self.volatile = False
+
+ def save(self):
+ return self.__dict__.copy()
+
+ def revert(self, old):
+ self.__dict__.clear()
+ self.__dict__.update(old)
+
+
+def get_eval_context(node, ctx):
+ if ctx is None:
+ if node.environment is None:
raise RuntimeError(
"if no eval context is passed, the "
"node must have an attached "
"environment."
)
- return EvalContext(node.environment)
- return ctx
-
-
-class Node(with_metaclass(NodeType, object)):
+ return EvalContext(node.environment)
+ return ctx
+
+
+class Node(with_metaclass(NodeType, object)):
"""Baseclass for all Jinja nodes. There are a number of nodes available
- of different types. There are four major types:
-
- - :class:`Stmt`: statements
- - :class:`Expr`: expressions
- - :class:`Helper`: helper nodes
- - :class:`Template`: the outermost wrapper node
-
- All nodes have fields and attributes. Fields may be other nodes, lists,
- or arbitrary values. Fields are passed to the constructor as regular
- positional arguments, attributes as keyword arguments. Each node has
- two attributes: `lineno` (the line number of the node) and `environment`.
- The `environment` attribute is set at the end of the parsing process for
- all nodes automatically.
- """
-
- fields = ()
+ of different types. There are four major types:
+
+ - :class:`Stmt`: statements
+ - :class:`Expr`: expressions
+ - :class:`Helper`: helper nodes
+ - :class:`Template`: the outermost wrapper node
+
+ All nodes have fields and attributes. Fields may be other nodes, lists,
+ or arbitrary values. Fields are passed to the constructor as regular
+ positional arguments, attributes as keyword arguments. Each node has
+ two attributes: `lineno` (the line number of the node) and `environment`.
+ The `environment` attribute is set at the end of the parsing process for
+ all nodes automatically.
+ """
+
+ fields = ()
attributes = ("lineno", "environment")
- abstract = True
-
- def __init__(self, *fields, **attributes):
- if self.abstract:
+ abstract = True
+
+ def __init__(self, *fields, **attributes):
+ if self.abstract:
raise TypeError("abstract nodes are not instantiable")
- if fields:
- if len(fields) != len(self.fields):
- if not self.fields:
+ if fields:
+ if len(fields) != len(self.fields):
+ if not self.fields:
raise TypeError("%r takes 0 arguments" % self.__class__.__name__)
raise TypeError(
"%r takes 0 or %d argument%s"
@@ -127,897 +127,897 @@ class Node(with_metaclass(NodeType, object)):
len(self.fields) != 1 and "s" or "",
)
)
- for name, arg in izip(self.fields, fields):
- setattr(self, name, arg)
- for attr in self.attributes:
- setattr(self, attr, attributes.pop(attr, None))
- if attributes:
+ for name, arg in izip(self.fields, fields):
+ setattr(self, name, arg)
+ for attr in self.attributes:
+ setattr(self, attr, attributes.pop(attr, None))
+ if attributes:
raise TypeError("unknown attribute %r" % next(iter(attributes)))
-
- def iter_fields(self, exclude=None, only=None):
- """This method iterates over all fields that are defined and yields
- ``(key, value)`` tuples. Per default all fields are returned, but
- it's possible to limit that to some fields by providing the `only`
- parameter or to exclude some using the `exclude` parameter. Both
- should be sets or tuples of field names.
- """
- for name in self.fields:
+
+ def iter_fields(self, exclude=None, only=None):
+ """This method iterates over all fields that are defined and yields
+ ``(key, value)`` tuples. Per default all fields are returned, but
+ it's possible to limit that to some fields by providing the `only`
+ parameter or to exclude some using the `exclude` parameter. Both
+ should be sets or tuples of field names.
+ """
+ for name in self.fields:
if (
(exclude is only is None)
or (exclude is not None and name not in exclude)
or (only is not None and name in only)
):
- try:
- yield name, getattr(self, name)
- except AttributeError:
- pass
-
- def iter_child_nodes(self, exclude=None, only=None):
- """Iterates over all direct child nodes of the node. This iterates
- over all fields and yields the values if they are nodes. If the value
- of a field is a list all the nodes in that list are returned.
- """
+ try:
+ yield name, getattr(self, name)
+ except AttributeError:
+ pass
+
+ def iter_child_nodes(self, exclude=None, only=None):
+ """Iterates over all direct child nodes of the node. This iterates
+ over all fields and yields the values if they are nodes. If the value
+ of a field is a list all the nodes in that list are returned.
+ """
for _, item in self.iter_fields(exclude, only):
- if isinstance(item, list):
- for n in item:
- if isinstance(n, Node):
- yield n
- elif isinstance(item, Node):
- yield item
-
- def find(self, node_type):
- """Find the first node of a given type. If no such node exists the
- return value is `None`.
- """
- for result in self.find_all(node_type):
- return result
-
- def find_all(self, node_type):
- """Find all the nodes of a given type. If the type is a tuple,
- the check is performed for any of the tuple items.
- """
- for child in self.iter_child_nodes():
- if isinstance(child, node_type):
- yield child
- for result in child.find_all(node_type):
- yield result
-
- def set_ctx(self, ctx):
- """Reset the context of a node and all child nodes. Per default the
- parser will generate all nodes with a 'load' context, as it's the
- most common one. This method is used in the parser to set assignment
- targets and other nodes to a store context.
- """
- todo = deque([self])
- while todo:
- node = todo.popleft()
+ if isinstance(item, list):
+ for n in item:
+ if isinstance(n, Node):
+ yield n
+ elif isinstance(item, Node):
+ yield item
+
+ def find(self, node_type):
+ """Find the first node of a given type. If no such node exists the
+ return value is `None`.
+ """
+ for result in self.find_all(node_type):
+ return result
+
+ def find_all(self, node_type):
+ """Find all the nodes of a given type. If the type is a tuple,
+ the check is performed for any of the tuple items.
+ """
+ for child in self.iter_child_nodes():
+ if isinstance(child, node_type):
+ yield child
+ for result in child.find_all(node_type):
+ yield result
+
+ def set_ctx(self, ctx):
+ """Reset the context of a node and all child nodes. Per default the
+ parser will generate all nodes with a 'load' context, as it's the
+ most common one. This method is used in the parser to set assignment
+ targets and other nodes to a store context.
+ """
+ todo = deque([self])
+ while todo:
+ node = todo.popleft()
if "ctx" in node.fields:
- node.ctx = ctx
- todo.extend(node.iter_child_nodes())
- return self
-
- def set_lineno(self, lineno, override=False):
- """Set the line numbers of the node and children."""
- todo = deque([self])
- while todo:
- node = todo.popleft()
+ node.ctx = ctx
+ todo.extend(node.iter_child_nodes())
+ return self
+
+ def set_lineno(self, lineno, override=False):
+ """Set the line numbers of the node and children."""
+ todo = deque([self])
+ while todo:
+ node = todo.popleft()
if "lineno" in node.attributes:
- if node.lineno is None or override:
- node.lineno = lineno
- todo.extend(node.iter_child_nodes())
- return self
-
- def set_environment(self, environment):
- """Set the environment for all nodes."""
- todo = deque([self])
- while todo:
- node = todo.popleft()
- node.environment = environment
- todo.extend(node.iter_child_nodes())
- return self
-
- def __eq__(self, other):
+ if node.lineno is None or override:
+ node.lineno = lineno
+ todo.extend(node.iter_child_nodes())
+ return self
+
+ def set_environment(self, environment):
+ """Set the environment for all nodes."""
+ todo = deque([self])
+ while todo:
+ node = todo.popleft()
+ node.environment = environment
+ todo.extend(node.iter_child_nodes())
+ return self
+
+ def __eq__(self, other):
return type(self) is type(other) and tuple(self.iter_fields()) == tuple(
other.iter_fields()
)
-
- def __ne__(self, other):
- return not self.__eq__(other)
-
- # Restore Python 2 hashing behavior on Python 3
- __hash__ = object.__hash__
-
- def __repr__(self):
+
+ def __ne__(self, other):
+ return not self.__eq__(other)
+
+ # Restore Python 2 hashing behavior on Python 3
+ __hash__ = object.__hash__
+
+ def __repr__(self):
return "%s(%s)" % (
- self.__class__.__name__,
+ self.__class__.__name__,
", ".join("%s=%r" % (arg, getattr(self, arg, None)) for arg in self.fields),
- )
-
- def dump(self):
- def _dump(node):
- if not isinstance(node, Node):
- buf.append(repr(node))
- return
-
+ )
+
+ def dump(self):
+ def _dump(node):
+ if not isinstance(node, Node):
+ buf.append(repr(node))
+ return
+
buf.append("nodes.%s(" % node.__class__.__name__)
- if not node.fields:
+ if not node.fields:
buf.append(")")
- return
- for idx, field in enumerate(node.fields):
- if idx:
+ return
+ for idx, field in enumerate(node.fields):
+ if idx:
buf.append(", ")
- value = getattr(node, field)
- if isinstance(value, list):
+ value = getattr(node, field)
+ if isinstance(value, list):
buf.append("[")
- for idx, item in enumerate(value):
- if idx:
+ for idx, item in enumerate(value):
+ if idx:
buf.append(", ")
- _dump(item)
+ _dump(item)
buf.append("]")
- else:
- _dump(value)
+ else:
+ _dump(value)
buf.append(")")
- buf = []
- _dump(self)
+ buf = []
+ _dump(self)
return "".join(buf)
-
-
-class Stmt(Node):
- """Base node for all statements."""
-
- abstract = True
-
-
-class Helper(Node):
- """Nodes that exist in a specific context only."""
-
- abstract = True
-
-
-class Template(Node):
- """Node that represents a template. This must be the outermost node that
- is passed to the compiler.
- """
-
+
+
+class Stmt(Node):
+ """Base node for all statements."""
+
+ abstract = True
+
+
+class Helper(Node):
+ """Nodes that exist in a specific context only."""
+
+ abstract = True
+
+
+class Template(Node):
+ """Node that represents a template. This must be the outermost node that
+ is passed to the compiler.
+ """
+
fields = ("body",)
+
-
-class Output(Stmt):
- """A node that holds multiple expressions which are then printed out.
- This is used both for the `print` statement and the regular template data.
- """
-
+class Output(Stmt):
+ """A node that holds multiple expressions which are then printed out.
+ This is used both for the `print` statement and the regular template data.
+ """
+
fields = ("nodes",)
+
-
-class Extends(Stmt):
- """Represents an extends statement."""
-
+class Extends(Stmt):
+ """Represents an extends statement."""
+
fields = ("template",)
-
-
-class For(Stmt):
- """The for loop. `target` is the target for the iteration (usually a
- :class:`Name` or :class:`Tuple`), `iter` the iterable. `body` is a list
- of nodes that are used as loop-body, and `else_` a list of nodes for the
- `else` block. If no else node exists it has to be an empty list.
-
- For filtered nodes an expression can be stored as `test`, otherwise `None`.
- """
-
+
+
+class For(Stmt):
+ """The for loop. `target` is the target for the iteration (usually a
+ :class:`Name` or :class:`Tuple`), `iter` the iterable. `body` is a list
+ of nodes that are used as loop-body, and `else_` a list of nodes for the
+ `else` block. If no else node exists it has to be an empty list.
+
+ For filtered nodes an expression can be stored as `test`, otherwise `None`.
+ """
+
fields = ("target", "iter", "body", "else_", "test", "recursive")
+
-
-class If(Stmt):
- """If `test` is true, `body` is rendered, else `else_`."""
-
+class If(Stmt):
+ """If `test` is true, `body` is rendered, else `else_`."""
+
fields = ("test", "body", "elif_", "else_")
-
-
-class Macro(Stmt):
- """A macro definition. `name` is the name of the macro, `args` a list of
- arguments and `defaults` a list of defaults if there are any. `body` is
- a list of nodes for the macro body.
- """
-
+
+
+class Macro(Stmt):
+ """A macro definition. `name` is the name of the macro, `args` a list of
+ arguments and `defaults` a list of defaults if there are any. `body` is
+ a list of nodes for the macro body.
+ """
+
fields = ("name", "args", "defaults", "body")
+
-
-class CallBlock(Stmt):
- """Like a macro without a name but a call instead. `call` is called with
- the unnamed macro as `caller` argument this node holds.
- """
-
+class CallBlock(Stmt):
+ """Like a macro without a name but a call instead. `call` is called with
+ the unnamed macro as `caller` argument this node holds.
+ """
+
fields = ("call", "args", "defaults", "body")
+
-
-class FilterBlock(Stmt):
- """Node for filter sections."""
-
+class FilterBlock(Stmt):
+ """Node for filter sections."""
+
fields = ("body", "filter")
-
-
-class With(Stmt):
- """Specific node for with statements. In older versions of Jinja the
- with statement was implemented on the base of the `Scope` node instead.
-
- .. versionadded:: 2.9.3
- """
-
+
+
+class With(Stmt):
+ """Specific node for with statements. In older versions of Jinja the
+ with statement was implemented on the base of the `Scope` node instead.
+
+ .. versionadded:: 2.9.3
+ """
+
fields = ("targets", "values", "body")
+
-
-class Block(Stmt):
- """A node that represents a block."""
-
+class Block(Stmt):
+ """A node that represents a block."""
+
fields = ("name", "body", "scoped")
+
-
-class Include(Stmt):
- """A node that represents the include tag."""
-
+class Include(Stmt):
+ """A node that represents the include tag."""
+
fields = ("template", "with_context", "ignore_missing")
+
-
-class Import(Stmt):
- """A node that represents the import tag."""
-
+class Import(Stmt):
+ """A node that represents the import tag."""
+
fields = ("template", "target", "with_context")
-
-
-class FromImport(Stmt):
- """A node that represents the from import tag. It's important to not
- pass unsafe names to the name attribute. The compiler translates the
- attribute lookups directly into getattr calls and does *not* use the
- subscript callback of the interface. As exported variables may not
- start with double underscores (which the parser asserts) this is not a
- problem for regular Jinja code, but if this node is used in an extension
- extra care must be taken.
-
- The list of names may contain tuples if aliases are wanted.
- """
-
+
+
+class FromImport(Stmt):
+ """A node that represents the from import tag. It's important to not
+ pass unsafe names to the name attribute. The compiler translates the
+ attribute lookups directly into getattr calls and does *not* use the
+ subscript callback of the interface. As exported variables may not
+ start with double underscores (which the parser asserts) this is not a
+ problem for regular Jinja code, but if this node is used in an extension
+ extra care must be taken.
+
+ The list of names may contain tuples if aliases are wanted.
+ """
+
fields = ("template", "names", "with_context")
+
-
-class ExprStmt(Stmt):
- """A statement that evaluates an expression and discards the result."""
-
+class ExprStmt(Stmt):
+ """A statement that evaluates an expression and discards the result."""
+
fields = ("node",)
+
-
-class Assign(Stmt):
- """Assigns an expression to a target."""
-
+class Assign(Stmt):
+ """Assigns an expression to a target."""
+
fields = ("target", "node")
+
-
-class AssignBlock(Stmt):
- """Assigns a block to a target."""
-
+class AssignBlock(Stmt):
+ """Assigns a block to a target."""
+
fields = ("target", "filter", "body")
-
-
-class Expr(Node):
- """Baseclass for all expressions."""
-
- abstract = True
-
- def as_const(self, eval_ctx=None):
- """Return the value of the expression as constant or raise
- :exc:`Impossible` if this was not possible.
-
- An :class:`EvalContext` can be provided, if none is given
- a default context is created which requires the nodes to have
- an attached environment.
-
- .. versionchanged:: 2.4
- the `eval_ctx` parameter was added.
- """
- raise Impossible()
-
- def can_assign(self):
- """Check if it's possible to assign something to this node."""
- return False
-
-
-class BinExpr(Expr):
- """Baseclass for all binary expressions."""
+
+
+class Expr(Node):
+ """Baseclass for all expressions."""
+
+ abstract = True
+
+ def as_const(self, eval_ctx=None):
+ """Return the value of the expression as constant or raise
+ :exc:`Impossible` if this was not possible.
+
+ An :class:`EvalContext` can be provided, if none is given
+ a default context is created which requires the nodes to have
+ an attached environment.
+
+ .. versionchanged:: 2.4
+ the `eval_ctx` parameter was added.
+ """
+ raise Impossible()
+
+ def can_assign(self):
+ """Check if it's possible to assign something to this node."""
+ return False
+
+
+class BinExpr(Expr):
+ """Baseclass for all binary expressions."""
fields = ("left", "right")
- operator = None
- abstract = True
-
- def as_const(self, eval_ctx=None):
- eval_ctx = get_eval_context(self, eval_ctx)
- # intercepted operators cannot be folded at compile time
+ operator = None
+ abstract = True
+
+ def as_const(self, eval_ctx=None):
+ eval_ctx = get_eval_context(self, eval_ctx)
+ # intercepted operators cannot be folded at compile time
if (
self.environment.sandboxed
and self.operator in self.environment.intercepted_binops
):
- raise Impossible()
- f = _binop_to_func[self.operator]
- try:
- return f(self.left.as_const(eval_ctx), self.right.as_const(eval_ctx))
- except Exception:
- raise Impossible()
-
-
-class UnaryExpr(Expr):
- """Baseclass for all unary expressions."""
+ raise Impossible()
+ f = _binop_to_func[self.operator]
+ try:
+ return f(self.left.as_const(eval_ctx), self.right.as_const(eval_ctx))
+ except Exception:
+ raise Impossible()
+
+
+class UnaryExpr(Expr):
+ """Baseclass for all unary expressions."""
fields = ("node",)
- operator = None
- abstract = True
-
- def as_const(self, eval_ctx=None):
- eval_ctx = get_eval_context(self, eval_ctx)
- # intercepted operators cannot be folded at compile time
+ operator = None
+ abstract = True
+
+ def as_const(self, eval_ctx=None):
+ eval_ctx = get_eval_context(self, eval_ctx)
+ # intercepted operators cannot be folded at compile time
if (
self.environment.sandboxed
and self.operator in self.environment.intercepted_unops
):
- raise Impossible()
- f = _uaop_to_func[self.operator]
- try:
- return f(self.node.as_const(eval_ctx))
- except Exception:
- raise Impossible()
-
-
-class Name(Expr):
- """Looks up a name or stores a value in a name.
- The `ctx` of the node can be one of the following values:
-
- - `store`: store a value in the name
- - `load`: load that name
- - `param`: like `store` but if the name was defined as function parameter.
- """
-
+ raise Impossible()
+ f = _uaop_to_func[self.operator]
+ try:
+ return f(self.node.as_const(eval_ctx))
+ except Exception:
+ raise Impossible()
+
+
+class Name(Expr):
+ """Looks up a name or stores a value in a name.
+ The `ctx` of the node can be one of the following values:
+
+ - `store`: store a value in the name
+ - `load`: load that name
+ - `param`: like `store` but if the name was defined as function parameter.
+ """
+
fields = ("name", "ctx")
- def can_assign(self):
+ def can_assign(self):
return self.name not in ("true", "false", "none", "True", "False", "None")
-
-
-class NSRef(Expr):
- """Reference to a namespace value assignment"""
-
+
+
+class NSRef(Expr):
+ """Reference to a namespace value assignment"""
+
fields = ("name", "attr")
- def can_assign(self):
- # We don't need any special checks here; NSRef assignments have a
- # runtime check to ensure the target is a namespace object which will
- # have been checked already as it is created using a normal assignment
- # which goes through a `Name` node.
- return True
-
-
-class Literal(Expr):
- """Baseclass for literals."""
-
- abstract = True
-
-
-class Const(Literal):
- """All constant values. The parser will return this node for simple
- constants such as ``42`` or ``"foo"`` but it can be used to store more
- complex values such as lists too. Only constants with a safe
-    representation (objects where ``eval(repr(x)) == x`` is true) are supported.
- """
-
+ def can_assign(self):
+ # We don't need any special checks here; NSRef assignments have a
+ # runtime check to ensure the target is a namespace object which will
+ # have been checked already as it is created using a normal assignment
+ # which goes through a `Name` node.
+ return True
+
+
+class Literal(Expr):
+ """Baseclass for literals."""
+
+ abstract = True
+
+
+class Const(Literal):
+ """All constant values. The parser will return this node for simple
+ constants such as ``42`` or ``"foo"`` but it can be used to store more
+ complex values such as lists too. Only constants with a safe
+    representation (objects where ``eval(repr(x)) == x`` is true) are supported.
+ """
+
fields = ("value",)
- def as_const(self, eval_ctx=None):
- rv = self.value
+ def as_const(self, eval_ctx=None):
+ rv = self.value
if (
PY2
and type(rv) is text_type
and self.environment.policies["compiler.ascii_str"]
):
- try:
+ try:
rv = rv.encode("ascii")
- except UnicodeError:
- pass
- return rv
-
- @classmethod
- def from_untrusted(cls, value, lineno=None, environment=None):
- """Return a const object if the value is representable as
- constant value in the generated code, otherwise it will raise
- an `Impossible` exception.
- """
- from .compiler import has_safe_repr
-
- if not has_safe_repr(value):
- raise Impossible()
- return cls(value, lineno=lineno, environment=environment)
-
-
-class TemplateData(Literal):
- """A constant template string."""
-
+ except UnicodeError:
+ pass
+ return rv
+
+ @classmethod
+ def from_untrusted(cls, value, lineno=None, environment=None):
+ """Return a const object if the value is representable as
+ constant value in the generated code, otherwise it will raise
+ an `Impossible` exception.
+ """
+ from .compiler import has_safe_repr
+
+ if not has_safe_repr(value):
+ raise Impossible()
+ return cls(value, lineno=lineno, environment=environment)
+
+
+class TemplateData(Literal):
+ """A constant template string."""
+
fields = ("data",)
- def as_const(self, eval_ctx=None):
- eval_ctx = get_eval_context(self, eval_ctx)
- if eval_ctx.volatile:
- raise Impossible()
- if eval_ctx.autoescape:
- return Markup(self.data)
- return self.data
-
-
-class Tuple(Literal):
- """For loop unpacking and some other things like multiple arguments
- for subscripts. Like for :class:`Name` `ctx` specifies if the tuple
- is used for loading the names or storing.
- """
-
+ def as_const(self, eval_ctx=None):
+ eval_ctx = get_eval_context(self, eval_ctx)
+ if eval_ctx.volatile:
+ raise Impossible()
+ if eval_ctx.autoescape:
+ return Markup(self.data)
+ return self.data
+
+
+class Tuple(Literal):
+ """For loop unpacking and some other things like multiple arguments
+ for subscripts. Like for :class:`Name` `ctx` specifies if the tuple
+ is used for loading the names or storing.
+ """
+
fields = ("items", "ctx")
- def as_const(self, eval_ctx=None):
- eval_ctx = get_eval_context(self, eval_ctx)
- return tuple(x.as_const(eval_ctx) for x in self.items)
-
- def can_assign(self):
- for item in self.items:
- if not item.can_assign():
- return False
- return True
-
-
-class List(Literal):
- """Any list literal such as ``[1, 2, 3]``"""
-
+ def as_const(self, eval_ctx=None):
+ eval_ctx = get_eval_context(self, eval_ctx)
+ return tuple(x.as_const(eval_ctx) for x in self.items)
+
+ def can_assign(self):
+ for item in self.items:
+ if not item.can_assign():
+ return False
+ return True
+
+
+class List(Literal):
+ """Any list literal such as ``[1, 2, 3]``"""
+
fields = ("items",)
- def as_const(self, eval_ctx=None):
- eval_ctx = get_eval_context(self, eval_ctx)
- return [x.as_const(eval_ctx) for x in self.items]
-
-
-class Dict(Literal):
- """Any dict literal such as ``{1: 2, 3: 4}``. The items must be a list of
- :class:`Pair` nodes.
- """
-
+ def as_const(self, eval_ctx=None):
+ eval_ctx = get_eval_context(self, eval_ctx)
+ return [x.as_const(eval_ctx) for x in self.items]
+
+
+class Dict(Literal):
+ """Any dict literal such as ``{1: 2, 3: 4}``. The items must be a list of
+ :class:`Pair` nodes.
+ """
+
fields = ("items",)
- def as_const(self, eval_ctx=None):
- eval_ctx = get_eval_context(self, eval_ctx)
- return dict(x.as_const(eval_ctx) for x in self.items)
-
-
-class Pair(Helper):
- """A key, value pair for dicts."""
-
+ def as_const(self, eval_ctx=None):
+ eval_ctx = get_eval_context(self, eval_ctx)
+ return dict(x.as_const(eval_ctx) for x in self.items)
+
+
+class Pair(Helper):
+ """A key, value pair for dicts."""
+
fields = ("key", "value")
- def as_const(self, eval_ctx=None):
- eval_ctx = get_eval_context(self, eval_ctx)
- return self.key.as_const(eval_ctx), self.value.as_const(eval_ctx)
-
-
-class Keyword(Helper):
- """A key, value pair for keyword arguments where key is a string."""
-
+ def as_const(self, eval_ctx=None):
+ eval_ctx = get_eval_context(self, eval_ctx)
+ return self.key.as_const(eval_ctx), self.value.as_const(eval_ctx)
+
+
+class Keyword(Helper):
+ """A key, value pair for keyword arguments where key is a string."""
+
fields = ("key", "value")
- def as_const(self, eval_ctx=None):
- eval_ctx = get_eval_context(self, eval_ctx)
- return self.key, self.value.as_const(eval_ctx)
-
-
-class CondExpr(Expr):
- """A conditional expression (inline if expression). (``{{
- foo if bar else baz }}``)
- """
-
+ def as_const(self, eval_ctx=None):
+ eval_ctx = get_eval_context(self, eval_ctx)
+ return self.key, self.value.as_const(eval_ctx)
+
+
+class CondExpr(Expr):
+ """A conditional expression (inline if expression). (``{{
+ foo if bar else baz }}``)
+ """
+
fields = ("test", "expr1", "expr2")
- def as_const(self, eval_ctx=None):
- eval_ctx = get_eval_context(self, eval_ctx)
- if self.test.as_const(eval_ctx):
- return self.expr1.as_const(eval_ctx)
-
- # if we evaluate to an undefined object, we better do that at runtime
- if self.expr2 is None:
- raise Impossible()
-
- return self.expr2.as_const(eval_ctx)
-
-
-def args_as_const(node, eval_ctx):
- args = [x.as_const(eval_ctx) for x in node.args]
- kwargs = dict(x.as_const(eval_ctx) for x in node.kwargs)
-
- if node.dyn_args is not None:
- try:
- args.extend(node.dyn_args.as_const(eval_ctx))
- except Exception:
- raise Impossible()
-
- if node.dyn_kwargs is not None:
- try:
- kwargs.update(node.dyn_kwargs.as_const(eval_ctx))
- except Exception:
- raise Impossible()
-
- return args, kwargs
-
-
-class Filter(Expr):
- """This node applies a filter on an expression. `name` is the name of
- the filter, the rest of the fields are the same as for :class:`Call`.
-
- If the `node` of a filter is `None` the contents of the last buffer are
- filtered. Buffers are created by macros and filter blocks.
- """
-
+ def as_const(self, eval_ctx=None):
+ eval_ctx = get_eval_context(self, eval_ctx)
+ if self.test.as_const(eval_ctx):
+ return self.expr1.as_const(eval_ctx)
+
+ # if we evaluate to an undefined object, we better do that at runtime
+ if self.expr2 is None:
+ raise Impossible()
+
+ return self.expr2.as_const(eval_ctx)
+
+
+def args_as_const(node, eval_ctx):
+ args = [x.as_const(eval_ctx) for x in node.args]
+ kwargs = dict(x.as_const(eval_ctx) for x in node.kwargs)
+
+ if node.dyn_args is not None:
+ try:
+ args.extend(node.dyn_args.as_const(eval_ctx))
+ except Exception:
+ raise Impossible()
+
+ if node.dyn_kwargs is not None:
+ try:
+ kwargs.update(node.dyn_kwargs.as_const(eval_ctx))
+ except Exception:
+ raise Impossible()
+
+ return args, kwargs
+
+
+class Filter(Expr):
+ """This node applies a filter on an expression. `name` is the name of
+ the filter, the rest of the fields are the same as for :class:`Call`.
+
+ If the `node` of a filter is `None` the contents of the last buffer are
+ filtered. Buffers are created by macros and filter blocks.
+ """
+
fields = ("node", "name", "args", "kwargs", "dyn_args", "dyn_kwargs")
-
- def as_const(self, eval_ctx=None):
- eval_ctx = get_eval_context(self, eval_ctx)
-
- if eval_ctx.volatile or self.node is None:
- raise Impossible()
-
- # we have to be careful here because we call filter_ below.
- # if this variable would be called filter, 2to3 would wrap the
+
+ def as_const(self, eval_ctx=None):
+ eval_ctx = get_eval_context(self, eval_ctx)
+
+ if eval_ctx.volatile or self.node is None:
+ raise Impossible()
+
+ # we have to be careful here because we call filter_ below.
+ # if this variable would be called filter, 2to3 would wrap the
# call in a list because it is assuming we are talking about the
- # builtin filter function here which no longer returns a list in
- # python 3. because of that, do not rename filter_ to filter!
- filter_ = self.environment.filters.get(self.name)
-
+ # builtin filter function here which no longer returns a list in
+ # python 3. because of that, do not rename filter_ to filter!
+ filter_ = self.environment.filters.get(self.name)
+
if filter_ is None or getattr(filter_, "contextfilter", False) is True:
- raise Impossible()
-
- # We cannot constant handle async filters, so we need to make sure
- # to not go down this path.
+ raise Impossible()
+
+ # We cannot constant handle async filters, so we need to make sure
+ # to not go down this path.
if eval_ctx.environment.is_async and getattr(
filter_, "asyncfiltervariant", False
- ):
- raise Impossible()
-
- args, kwargs = args_as_const(self, eval_ctx)
- args.insert(0, self.node.as_const(eval_ctx))
-
+ ):
+ raise Impossible()
+
+ args, kwargs = args_as_const(self, eval_ctx)
+ args.insert(0, self.node.as_const(eval_ctx))
+
if getattr(filter_, "evalcontextfilter", False) is True:
- args.insert(0, eval_ctx)
+ args.insert(0, eval_ctx)
elif getattr(filter_, "environmentfilter", False) is True:
- args.insert(0, self.environment)
-
- try:
- return filter_(*args, **kwargs)
- except Exception:
- raise Impossible()
-
-
-class Test(Expr):
- """Applies a test on an expression. `name` is the name of the test, the
- rest of the fields are the same as for :class:`Call`.
- """
-
+ args.insert(0, self.environment)
+
+ try:
+ return filter_(*args, **kwargs)
+ except Exception:
+ raise Impossible()
+
+
+class Test(Expr):
+ """Applies a test on an expression. `name` is the name of the test, the
+ rest of the fields are the same as for :class:`Call`.
+ """
+
fields = ("node", "name", "args", "kwargs", "dyn_args", "dyn_kwargs")
-
- def as_const(self, eval_ctx=None):
- test = self.environment.tests.get(self.name)
-
- if test is None:
- raise Impossible()
-
- eval_ctx = get_eval_context(self, eval_ctx)
- args, kwargs = args_as_const(self, eval_ctx)
- args.insert(0, self.node.as_const(eval_ctx))
-
- try:
- return test(*args, **kwargs)
- except Exception:
- raise Impossible()
-
-
-class Call(Expr):
- """Calls an expression. `args` is a list of arguments, `kwargs` a list
- of keyword arguments (list of :class:`Keyword` nodes), and `dyn_args`
-    and `dyn_kwargs` have to be either `None` or a node that is used as
- node for dynamic positional (``*args``) or keyword (``**kwargs``)
- arguments.
- """
-
+
+ def as_const(self, eval_ctx=None):
+ test = self.environment.tests.get(self.name)
+
+ if test is None:
+ raise Impossible()
+
+ eval_ctx = get_eval_context(self, eval_ctx)
+ args, kwargs = args_as_const(self, eval_ctx)
+ args.insert(0, self.node.as_const(eval_ctx))
+
+ try:
+ return test(*args, **kwargs)
+ except Exception:
+ raise Impossible()
+
+
+class Call(Expr):
+ """Calls an expression. `args` is a list of arguments, `kwargs` a list
+ of keyword arguments (list of :class:`Keyword` nodes), and `dyn_args`
+    and `dyn_kwargs` have to be either `None` or a node that is used as
+ node for dynamic positional (``*args``) or keyword (``**kwargs``)
+ arguments.
+ """
+
fields = ("node", "args", "kwargs", "dyn_args", "dyn_kwargs")
+
-
-class Getitem(Expr):
- """Get an attribute or item from an expression and prefer the item."""
-
+class Getitem(Expr):
+ """Get an attribute or item from an expression and prefer the item."""
+
fields = ("node", "arg", "ctx")
- def as_const(self, eval_ctx=None):
- eval_ctx = get_eval_context(self, eval_ctx)
+ def as_const(self, eval_ctx=None):
+ eval_ctx = get_eval_context(self, eval_ctx)
if self.ctx != "load":
- raise Impossible()
- try:
+ raise Impossible()
+ try:
return self.environment.getitem(
self.node.as_const(eval_ctx), self.arg.as_const(eval_ctx)
)
- except Exception:
- raise Impossible()
-
- def can_assign(self):
- return False
-
-
-class Getattr(Expr):
- """Get an attribute or item from an expression that is a ascii-only
- bytestring and prefer the attribute.
- """
-
+ except Exception:
+ raise Impossible()
+
+ def can_assign(self):
+ return False
+
+
+class Getattr(Expr):
+ """Get an attribute or item from an expression that is a ascii-only
+ bytestring and prefer the attribute.
+ """
+
fields = ("node", "attr", "ctx")
- def as_const(self, eval_ctx=None):
+ def as_const(self, eval_ctx=None):
if self.ctx != "load":
- raise Impossible()
- try:
- eval_ctx = get_eval_context(self, eval_ctx)
+ raise Impossible()
+ try:
+ eval_ctx = get_eval_context(self, eval_ctx)
return self.environment.getattr(self.node.as_const(eval_ctx), self.attr)
- except Exception:
- raise Impossible()
-
- def can_assign(self):
- return False
-
-
-class Slice(Expr):
- """Represents a slice object. This must only be used as argument for
- :class:`Subscript`.
- """
-
+ except Exception:
+ raise Impossible()
+
+ def can_assign(self):
+ return False
+
+
+class Slice(Expr):
+ """Represents a slice object. This must only be used as argument for
+ :class:`Subscript`.
+ """
+
fields = ("start", "stop", "step")
- def as_const(self, eval_ctx=None):
- eval_ctx = get_eval_context(self, eval_ctx)
-
- def const(obj):
- if obj is None:
- return None
- return obj.as_const(eval_ctx)
-
- return slice(const(self.start), const(self.stop), const(self.step))
-
-
-class Concat(Expr):
- """Concatenates the list of expressions provided after converting them to
- unicode.
- """
-
+ def as_const(self, eval_ctx=None):
+ eval_ctx = get_eval_context(self, eval_ctx)
+
+ def const(obj):
+ if obj is None:
+ return None
+ return obj.as_const(eval_ctx)
+
+ return slice(const(self.start), const(self.stop), const(self.step))
+
+
+class Concat(Expr):
+ """Concatenates the list of expressions provided after converting them to
+ unicode.
+ """
+
fields = ("nodes",)
- def as_const(self, eval_ctx=None):
- eval_ctx = get_eval_context(self, eval_ctx)
+ def as_const(self, eval_ctx=None):
+ eval_ctx = get_eval_context(self, eval_ctx)
return "".join(text_type(x.as_const(eval_ctx)) for x in self.nodes)
-
-
-class Compare(Expr):
- """Compares an expression with some other expressions. `ops` must be a
- list of :class:`Operand`\\s.
- """
-
+
+
+class Compare(Expr):
+ """Compares an expression with some other expressions. `ops` must be a
+ list of :class:`Operand`\\s.
+ """
+
fields = ("expr", "ops")
- def as_const(self, eval_ctx=None):
- eval_ctx = get_eval_context(self, eval_ctx)
- result = value = self.expr.as_const(eval_ctx)
+ def as_const(self, eval_ctx=None):
+ eval_ctx = get_eval_context(self, eval_ctx)
+ result = value = self.expr.as_const(eval_ctx)
- try:
- for op in self.ops:
- new_value = op.expr.as_const(eval_ctx)
- result = _cmpop_to_func[op.op](value, new_value)
+ try:
+ for op in self.ops:
+ new_value = op.expr.as_const(eval_ctx)
+ result = _cmpop_to_func[op.op](value, new_value)
if not result:
return False
- value = new_value
- except Exception:
- raise Impossible()
-
- return result
-
-
-class Operand(Helper):
- """Holds an operator and an expression."""
+ value = new_value
+ except Exception:
+ raise Impossible()
+ return result
+
+
+class Operand(Helper):
+ """Holds an operator and an expression."""
+
fields = ("op", "expr")
-if __debug__:
+if __debug__:
Operand.__doc__ += "\nThe following operators are available: " + ", ".join(
sorted(
"``%s``" % x
for x in set(_binop_to_func) | set(_uaop_to_func) | set(_cmpop_to_func)
)
)
-
-
-class Mul(BinExpr):
- """Multiplies the left with the right node."""
-
+
+
+class Mul(BinExpr):
+ """Multiplies the left with the right node."""
+
operator = "*"
+
-
-class Div(BinExpr):
- """Divides the left by the right node."""
-
+class Div(BinExpr):
+ """Divides the left by the right node."""
+
operator = "/"
+
-
-class FloorDiv(BinExpr):
- """Divides the left by the right node and truncates conver the
- result into an integer by truncating.
- """
-
+class FloorDiv(BinExpr):
+ """Divides the left by the right node and truncates conver the
+ result into an integer by truncating.
+ """
+
operator = "//"
+
-
-class Add(BinExpr):
- """Add the left to the right node."""
-
+class Add(BinExpr):
+ """Add the left to the right node."""
+
operator = "+"
+
-
-class Sub(BinExpr):
- """Subtract the right from the left node."""
-
+class Sub(BinExpr):
+ """Subtract the right from the left node."""
+
operator = "-"
+
-
-class Mod(BinExpr):
- """Left modulo right."""
-
+class Mod(BinExpr):
+ """Left modulo right."""
+
operator = "%"
+
-
-class Pow(BinExpr):
- """Left to the power of right."""
-
+class Pow(BinExpr):
+ """Left to the power of right."""
+
operator = "**"
+
-
-class And(BinExpr):
- """Short circuited AND."""
-
+class And(BinExpr):
+ """Short circuited AND."""
+
operator = "and"
- def as_const(self, eval_ctx=None):
- eval_ctx = get_eval_context(self, eval_ctx)
- return self.left.as_const(eval_ctx) and self.right.as_const(eval_ctx)
-
-
-class Or(BinExpr):
- """Short circuited OR."""
-
+ def as_const(self, eval_ctx=None):
+ eval_ctx = get_eval_context(self, eval_ctx)
+ return self.left.as_const(eval_ctx) and self.right.as_const(eval_ctx)
+
+
+class Or(BinExpr):
+ """Short circuited OR."""
+
operator = "or"
- def as_const(self, eval_ctx=None):
- eval_ctx = get_eval_context(self, eval_ctx)
- return self.left.as_const(eval_ctx) or self.right.as_const(eval_ctx)
-
-
-class Not(UnaryExpr):
- """Negate the expression."""
-
+ def as_const(self, eval_ctx=None):
+ eval_ctx = get_eval_context(self, eval_ctx)
+ return self.left.as_const(eval_ctx) or self.right.as_const(eval_ctx)
+
+
+class Not(UnaryExpr):
+ """Negate the expression."""
+
operator = "not"
+
-
-class Neg(UnaryExpr):
- """Make the expression negative."""
-
+class Neg(UnaryExpr):
+ """Make the expression negative."""
+
operator = "-"
+
-
-class Pos(UnaryExpr):
- """Make the expression positive (noop for most expressions)"""
-
+class Pos(UnaryExpr):
+ """Make the expression positive (noop for most expressions)"""
+
operator = "+"
-
-
-# Helpers for extensions
-
-
-class EnvironmentAttribute(Expr):
- """Loads an attribute from the environment object. This is useful for
- extensions that want to call a callback stored on the environment.
- """
-
+
+
+# Helpers for extensions
+
+
+class EnvironmentAttribute(Expr):
+ """Loads an attribute from the environment object. This is useful for
+ extensions that want to call a callback stored on the environment.
+ """
+
fields = ("name",)
-
-
-class ExtensionAttribute(Expr):
- """Returns the attribute of an extension bound to the environment.
- The identifier is the identifier of the :class:`Extension`.
-
- This node is usually constructed by calling the
- :meth:`~jinja2.ext.Extension.attr` method on an extension.
- """
-
+
+
+class ExtensionAttribute(Expr):
+ """Returns the attribute of an extension bound to the environment.
+ The identifier is the identifier of the :class:`Extension`.
+
+ This node is usually constructed by calling the
+ :meth:`~jinja2.ext.Extension.attr` method on an extension.
+ """
+
fields = ("identifier", "name")
-
-
-class ImportedName(Expr):
- """If created with an import name the import name is returned on node
- access. For example ``ImportedName('cgi.escape')`` returns the `escape`
- function from the cgi module on evaluation. Imports are optimized by the
- compiler so there is no need to assign them to local variables.
- """
-
+
+
+class ImportedName(Expr):
+ """If created with an import name the import name is returned on node
+ access. For example ``ImportedName('cgi.escape')`` returns the `escape`
+ function from the cgi module on evaluation. Imports are optimized by the
+ compiler so there is no need to assign them to local variables.
+ """
+
fields = ("importname",)
-
-
-class InternalName(Expr):
- """An internal name in the compiler. You cannot create these nodes
- yourself but the parser provides a
- :meth:`~jinja2.parser.Parser.free_identifier` method that creates
- a new identifier for you. This identifier is not available from the
-    template and is not treated specially by the compiler.
- """
-
+
+
+class InternalName(Expr):
+ """An internal name in the compiler. You cannot create these nodes
+ yourself but the parser provides a
+ :meth:`~jinja2.parser.Parser.free_identifier` method that creates
+ a new identifier for you. This identifier is not available from the
+    template and is not treated specially by the compiler.
+ """
+
fields = ("name",)
- def __init__(self):
+ def __init__(self):
raise TypeError(
"Can't create internal names. Use the "
"`free_identifier` method on a parser."
)
-
-
-class MarkSafe(Expr):
- """Mark the wrapped expression as safe (wrap it as `Markup`)."""
-
+
+
+class MarkSafe(Expr):
+ """Mark the wrapped expression as safe (wrap it as `Markup`)."""
+
fields = ("expr",)
- def as_const(self, eval_ctx=None):
- eval_ctx = get_eval_context(self, eval_ctx)
- return Markup(self.expr.as_const(eval_ctx))
-
-
-class MarkSafeIfAutoescape(Expr):
- """Mark the wrapped expression as safe (wrap it as `Markup`) but
- only if autoescaping is active.
-
- .. versionadded:: 2.5
- """
-
+ def as_const(self, eval_ctx=None):
+ eval_ctx = get_eval_context(self, eval_ctx)
+ return Markup(self.expr.as_const(eval_ctx))
+
+
+class MarkSafeIfAutoescape(Expr):
+ """Mark the wrapped expression as safe (wrap it as `Markup`) but
+ only if autoescaping is active.
+
+ .. versionadded:: 2.5
+ """
+
fields = ("expr",)
- def as_const(self, eval_ctx=None):
- eval_ctx = get_eval_context(self, eval_ctx)
- if eval_ctx.volatile:
- raise Impossible()
- expr = self.expr.as_const(eval_ctx)
- if eval_ctx.autoescape:
- return Markup(expr)
- return expr
-
-
-class ContextReference(Expr):
- """Returns the current template context. It can be used like a
- :class:`Name` node, with a ``'load'`` ctx and will return the
- current :class:`~jinja2.runtime.Context` object.
-
-    Here is an example that assigns the current template name to a
- variable named `foo`::
-
- Assign(Name('foo', ctx='store'),
- Getattr(ContextReference(), 'name'))
+ def as_const(self, eval_ctx=None):
+ eval_ctx = get_eval_context(self, eval_ctx)
+ if eval_ctx.volatile:
+ raise Impossible()
+ expr = self.expr.as_const(eval_ctx)
+ if eval_ctx.autoescape:
+ return Markup(expr)
+ return expr
+
+
+class ContextReference(Expr):
+ """Returns the current template context. It can be used like a
+ :class:`Name` node, with a ``'load'`` ctx and will return the
+ current :class:`~jinja2.runtime.Context` object.
+
+    Here is an example that assigns the current template name to a
+ variable named `foo`::
+
+ Assign(Name('foo', ctx='store'),
+ Getattr(ContextReference(), 'name'))
This is basically equivalent to using the
:func:`~jinja2.contextfunction` decorator when using the
high-level API, which causes a reference to the context to be passed
as the first argument to a function.
- """
-
-
+ """
+
+
class DerivedContextReference(Expr):
"""Return the current template context including locals. Behaves
exactly like :class:`ContextReference`, but includes local
@@ -1027,60 +1027,60 @@ class DerivedContextReference(Expr):
"""
-class Continue(Stmt):
- """Continue a loop."""
-
-
-class Break(Stmt):
- """Break a loop."""
-
-
-class Scope(Stmt):
- """An artificial scope."""
-
+class Continue(Stmt):
+ """Continue a loop."""
+
+
+class Break(Stmt):
+ """Break a loop."""
+
+
+class Scope(Stmt):
+ """An artificial scope."""
+
fields = ("body",)
-
-
-class OverlayScope(Stmt):
- """An overlay scope for extensions. This is a largely unoptimized scope
- that however can be used to introduce completely arbitrary variables into
- a sub scope from a dictionary or dictionary like object. The `context`
- field has to evaluate to a dictionary object.
-
- Example usage::
-
- OverlayScope(context=self.call_method('get_context'),
- body=[...])
-
- .. versionadded:: 2.10
- """
-
+
+
+class OverlayScope(Stmt):
+ """An overlay scope for extensions. This is a largely unoptimized scope
+ that however can be used to introduce completely arbitrary variables into
+ a sub scope from a dictionary or dictionary like object. The `context`
+ field has to evaluate to a dictionary object.
+
+ Example usage::
+
+ OverlayScope(context=self.call_method('get_context'),
+ body=[...])
+
+ .. versionadded:: 2.10
+ """
+
fields = ("context", "body")
-
-
-class EvalContextModifier(Stmt):
- """Modifies the eval context. For each option that should be modified,
- a :class:`Keyword` has to be added to the :attr:`options` list.
-
- Example to change the `autoescape` setting::
-
- EvalContextModifier(options=[Keyword('autoescape', Const(True))])
- """
-
+
+
+class EvalContextModifier(Stmt):
+ """Modifies the eval context. For each option that should be modified,
+ a :class:`Keyword` has to be added to the :attr:`options` list.
+
+ Example to change the `autoescape` setting::
+
+ EvalContextModifier(options=[Keyword('autoescape', Const(True))])
+ """
+
fields = ("options",)
-
-
-class ScopedEvalContextModifier(EvalContextModifier):
- """Modifies the eval context and reverts it later. Works exactly like
- :class:`EvalContextModifier` but will only modify the
- :class:`~jinja2.nodes.EvalContext` for nodes in the :attr:`body`.
- """
-
+
+
+class ScopedEvalContextModifier(EvalContextModifier):
+ """Modifies the eval context and reverts it later. Works exactly like
+ :class:`EvalContextModifier` but will only modify the
+ :class:`~jinja2.nodes.EvalContext` for nodes in the :attr:`body`.
+ """
+
fields = ("body",)
+
-
-# make sure nobody creates custom nodes
-def _failing_new(*args, **kwargs):
+# make sure nobody creates custom nodes
+def _failing_new(*args, **kwargs):
raise TypeError("can't create custom node types")
diff --git a/contrib/python/Jinja2/py2/jinja2/optimizer.py b/contrib/python/Jinja2/py2/jinja2/optimizer.py
index 7bc78c4524..f4fddd926d 100644
--- a/contrib/python/Jinja2/py2/jinja2/optimizer.py
+++ b/contrib/python/Jinja2/py2/jinja2/optimizer.py
@@ -1,31 +1,31 @@
-# -*- coding: utf-8 -*-
+# -*- coding: utf-8 -*-
"""The optimizer tries to constant fold expressions and modify the AST
in place so that it should be faster to evaluate.
-
+
Because the AST does not contain all the scoping information and the
compiler has to find that out, we cannot do all the optimizations we
want. For example, loop unrolling doesn't work because unrolled loops
would have a different scope. The solution would be a second syntax tree
that stored the scoping rules.
-"""
+"""
from . import nodes
from .visitor import NodeTransformer
-
-
-def optimize(node, environment):
- """The context hint can be used to perform an static optimization
- based on the context given."""
- optimizer = Optimizer(environment)
- return optimizer.visit(node)
-
-
-class Optimizer(NodeTransformer):
- def __init__(self, environment):
- self.environment = environment
-
+
+
+def optimize(node, environment):
+ """The context hint can be used to perform an static optimization
+ based on the context given."""
+ optimizer = Optimizer(environment)
+ return optimizer.visit(node)
+
+
+class Optimizer(NodeTransformer):
+ def __init__(self, environment):
+ self.environment = environment
+
def generic_visit(self, node, *args, **kwargs):
node = super(Optimizer, self).generic_visit(node, *args, **kwargs)
-
+
# Do constant folding. Some other nodes besides Expr have
# as_const, but folding them causes errors later on.
if isinstance(node, nodes.Expr):
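Likewise, a short sketch (not part of the patch) of how ``optimize()`` from the file above can be driven, assuming ``Environment.parse()`` from the same vendored release::

    from jinja2 import Environment
    from jinja2.optimizer import optimize

    env = Environment()
    tree = env.parse("{{ 1 + 2 }}")  # Template(body=[Output(nodes=[Add(...)])])
    tree = optimize(tree, env)       # Add(Const(1), Const(2)) folds to Const(3)
    print(tree)
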
diff --git a/contrib/python/Jinja2/py2/jinja2/parser.py b/contrib/python/Jinja2/py2/jinja2/parser.py
index d5881066f7..55a6de25a8 100644
--- a/contrib/python/Jinja2/py2/jinja2/parser.py
+++ b/contrib/python/Jinja2/py2/jinja2/parser.py
@@ -1,4 +1,4 @@
-# -*- coding: utf-8 -*-
+# -*- coding: utf-8 -*-
"""Parse tokens from the lexer into nodes for the compiler."""
from . import nodes
from ._compat import imap
@@ -6,7 +6,7 @@ from .exceptions import TemplateAssertionError
from .exceptions import TemplateSyntaxError
from .lexer import describe_token
from .lexer import describe_token_expr
-
+
_statement_keywords = frozenset(
[
"for",
@@ -24,313 +24,313 @@ _statement_keywords = frozenset(
]
)
_compare_operators = frozenset(["eq", "ne", "lt", "lteq", "gt", "gteq"])
-
-_math_nodes = {
+
+_math_nodes = {
"add": nodes.Add,
"sub": nodes.Sub,
"mul": nodes.Mul,
"div": nodes.Div,
"floordiv": nodes.FloorDiv,
"mod": nodes.Mod,
-}
-
-
-class Parser(object):
+}
+
+
+class Parser(object):
"""This is the central parsing class Jinja uses. It's passed to
- extensions and can be used to parse expressions or statements.
- """
-
+ extensions and can be used to parse expressions or statements.
+ """
+
def __init__(self, environment, source, name=None, filename=None, state=None):
- self.environment = environment
- self.stream = environment._tokenize(source, name, filename, state)
- self.name = name
- self.filename = filename
- self.closed = False
- self.extensions = {}
- for extension in environment.iter_extensions():
- for tag in extension.tags:
- self.extensions[tag] = extension.parse
- self._last_identifier = 0
- self._tag_stack = []
- self._end_token_stack = []
-
- def fail(self, msg, lineno=None, exc=TemplateSyntaxError):
- """Convenience method that raises `exc` with the message, passed
- line number or last line number as well as the current name and
- filename.
- """
- if lineno is None:
- lineno = self.stream.current.lineno
- raise exc(msg, lineno, self.name, self.filename)
-
- def _fail_ut_eof(self, name, end_token_stack, lineno):
- expected = []
- for exprs in end_token_stack:
- expected.extend(imap(describe_token_expr, exprs))
- if end_token_stack:
+ self.environment = environment
+ self.stream = environment._tokenize(source, name, filename, state)
+ self.name = name
+ self.filename = filename
+ self.closed = False
+ self.extensions = {}
+ for extension in environment.iter_extensions():
+ for tag in extension.tags:
+ self.extensions[tag] = extension.parse
+ self._last_identifier = 0
+ self._tag_stack = []
+ self._end_token_stack = []
+
+ def fail(self, msg, lineno=None, exc=TemplateSyntaxError):
+ """Convenience method that raises `exc` with the message, passed
+ line number or last line number as well as the current name and
+ filename.
+ """
+ if lineno is None:
+ lineno = self.stream.current.lineno
+ raise exc(msg, lineno, self.name, self.filename)
+
+ def _fail_ut_eof(self, name, end_token_stack, lineno):
+ expected = []
+ for exprs in end_token_stack:
+ expected.extend(imap(describe_token_expr, exprs))
+ if end_token_stack:
currently_looking = " or ".join(
"'%s'" % describe_token_expr(expr) for expr in end_token_stack[-1]
)
- else:
- currently_looking = None
-
- if name is None:
+ else:
+ currently_looking = None
+
+ if name is None:
message = ["Unexpected end of template."]
- else:
+ else:
message = ["Encountered unknown tag '%s'." % name]
-
- if currently_looking:
- if name is not None and name in expected:
+
+ if currently_looking:
+ if name is not None and name in expected:
message.append(
"You probably made a nesting mistake. Jinja "
"is expecting this tag, but currently looking "
"for %s." % currently_looking
)
- else:
+ else:
message.append(
"Jinja was looking for the following tags: "
"%s." % currently_looking
)
-
- if self._tag_stack:
+
+ if self._tag_stack:
message.append(
"The innermost block that needs to be "
"closed is '%s'." % self._tag_stack[-1]
)
-
+
self.fail(" ".join(message), lineno)
-
- def fail_unknown_tag(self, name, lineno=None):
- """Called if the parser encounters an unknown tag. Tries to fail
- with a human readable error message that could help to identify
- the problem.
- """
- return self._fail_ut_eof(name, self._end_token_stack, lineno)
-
- def fail_eof(self, end_tokens=None, lineno=None):
- """Like fail_unknown_tag but for end of template situations."""
- stack = list(self._end_token_stack)
- if end_tokens is not None:
- stack.append(end_tokens)
- return self._fail_ut_eof(None, stack, lineno)
-
- def is_tuple_end(self, extra_end_rules=None):
- """Are we at the end of a tuple?"""
+
+ def fail_unknown_tag(self, name, lineno=None):
+ """Called if the parser encounters an unknown tag. Tries to fail
+ with a human readable error message that could help to identify
+ the problem.
+ """
+ return self._fail_ut_eof(name, self._end_token_stack, lineno)
+
+ def fail_eof(self, end_tokens=None, lineno=None):
+ """Like fail_unknown_tag but for end of template situations."""
+ stack = list(self._end_token_stack)
+ if end_tokens is not None:
+ stack.append(end_tokens)
+ return self._fail_ut_eof(None, stack, lineno)
+
+ def is_tuple_end(self, extra_end_rules=None):
+ """Are we at the end of a tuple?"""
if self.stream.current.type in ("variable_end", "block_end", "rparen"):
- return True
- elif extra_end_rules is not None:
- return self.stream.current.test_any(extra_end_rules)
- return False
-
- def free_identifier(self, lineno=None):
- """Return a new free identifier as :class:`~jinja2.nodes.InternalName`."""
- self._last_identifier += 1
- rv = object.__new__(nodes.InternalName)
+ return True
+ elif extra_end_rules is not None:
+ return self.stream.current.test_any(extra_end_rules)
+ return False
+
+ def free_identifier(self, lineno=None):
+ """Return a new free identifier as :class:`~jinja2.nodes.InternalName`."""
+ self._last_identifier += 1
+ rv = object.__new__(nodes.InternalName)
nodes.Node.__init__(rv, "fi%d" % self._last_identifier, lineno=lineno)
- return rv
-
- def parse_statement(self):
- """Parse a single statement."""
- token = self.stream.current
+ return rv
+
+ def parse_statement(self):
+ """Parse a single statement."""
+ token = self.stream.current
if token.type != "name":
self.fail("tag name expected", token.lineno)
- self._tag_stack.append(token.value)
- pop_tag = True
- try:
- if token.value in _statement_keywords:
+ self._tag_stack.append(token.value)
+ pop_tag = True
+ try:
+ if token.value in _statement_keywords:
return getattr(self, "parse_" + self.stream.current.value)()
if token.value == "call":
- return self.parse_call_block()
+ return self.parse_call_block()
if token.value == "filter":
- return self.parse_filter_block()
- ext = self.extensions.get(token.value)
- if ext is not None:
- return ext(self)
-
- # did not work out, remove the token we pushed by accident
- # from the stack so that the unknown tag fail function can
- # produce a proper error message.
- self._tag_stack.pop()
- pop_tag = False
- self.fail_unknown_tag(token.value, token.lineno)
- finally:
- if pop_tag:
- self._tag_stack.pop()
-
- def parse_statements(self, end_tokens, drop_needle=False):
- """Parse multiple statements into a list until one of the end tokens
- is reached. This is used to parse the body of statements as it also
- parses template data if appropriate. The parser checks first if the
- current token is a colon and skips it if there is one. Then it checks
-    for the block end and parses until one of the `end_tokens` is
- reached. Per default the active token in the stream at the end of
- the call is the matched end token. If this is not wanted `drop_needle`
- can be set to `True` and the end token is removed.
- """
- # the first token may be a colon for python compatibility
+ return self.parse_filter_block()
+ ext = self.extensions.get(token.value)
+ if ext is not None:
+ return ext(self)
+
+ # did not work out, remove the token we pushed by accident
+ # from the stack so that the unknown tag fail function can
+ # produce a proper error message.
+ self._tag_stack.pop()
+ pop_tag = False
+ self.fail_unknown_tag(token.value, token.lineno)
+ finally:
+ if pop_tag:
+ self._tag_stack.pop()
+
+ def parse_statements(self, end_tokens, drop_needle=False):
+ """Parse multiple statements into a list until one of the end tokens
+ is reached. This is used to parse the body of statements as it also
+ parses template data if appropriate. The parser checks first if the
+ current token is a colon and skips it if there is one. Then it checks
+    for the block end and parses until one of the `end_tokens` is
+ reached. Per default the active token in the stream at the end of
+ the call is the matched end token. If this is not wanted `drop_needle`
+ can be set to `True` and the end token is removed.
+ """
+ # the first token may be a colon for python compatibility
self.stream.skip_if("colon")
-
- # in the future it would be possible to add whole code sections
- # by adding some sort of end of statement token and parsing those here.
+
+ # in the future it would be possible to add whole code sections
+ # by adding some sort of end of statement token and parsing those here.
self.stream.expect("block_end")
- result = self.subparse(end_tokens)
-
- # we reached the end of the template too early, the subparser
- # does not check for this, so we do that now
+ result = self.subparse(end_tokens)
+
+ # we reached the end of the template too early, the subparser
+ # does not check for this, so we do that now
if self.stream.current.type == "eof":
- self.fail_eof(end_tokens)
-
- if drop_needle:
- next(self.stream)
- return result
-
- def parse_set(self):
- """Parse an assign statement."""
- lineno = next(self.stream).lineno
- target = self.parse_assign_target(with_namespace=True)
+ self.fail_eof(end_tokens)
+
+ if drop_needle:
+ next(self.stream)
+ return result
+
+ def parse_set(self):
+ """Parse an assign statement."""
+ lineno = next(self.stream).lineno
+ target = self.parse_assign_target(with_namespace=True)
if self.stream.skip_if("assign"):
- expr = self.parse_tuple()
- return nodes.Assign(target, expr, lineno=lineno)
- filter_node = self.parse_filter(None)
+ expr = self.parse_tuple()
+ return nodes.Assign(target, expr, lineno=lineno)
+ filter_node = self.parse_filter(None)
body = self.parse_statements(("name:endset",), drop_needle=True)
- return nodes.AssignBlock(target, filter_node, body, lineno=lineno)
-
- def parse_for(self):
- """Parse a for loop."""
+ return nodes.AssignBlock(target, filter_node, body, lineno=lineno)
+
+ def parse_for(self):
+ """Parse a for loop."""
lineno = self.stream.expect("name:for").lineno
target = self.parse_assign_target(extra_end_rules=("name:in",))
self.stream.expect("name:in")
iter = self.parse_tuple(
with_condexpr=False, extra_end_rules=("name:recursive",)
)
- test = None
+ test = None
if self.stream.skip_if("name:if"):
- test = self.parse_expression()
+ test = self.parse_expression()
recursive = self.stream.skip_if("name:recursive")
body = self.parse_statements(("name:endfor", "name:else"))
if next(self.stream).value == "endfor":
- else_ = []
- else:
+ else_ = []
+ else:
else_ = self.parse_statements(("name:endfor",), drop_needle=True)
return nodes.For(target, iter, body, else_, test, recursive, lineno=lineno)
-
- def parse_if(self):
- """Parse an if construct."""
+
+ def parse_if(self):
+ """Parse an if construct."""
node = result = nodes.If(lineno=self.stream.expect("name:if").lineno)
- while 1:
- node.test = self.parse_tuple(with_condexpr=False)
+ while 1:
+ node.test = self.parse_tuple(with_condexpr=False)
node.body = self.parse_statements(("name:elif", "name:else", "name:endif"))
- node.elif_ = []
- node.else_ = []
- token = next(self.stream)
+ node.elif_ = []
+ node.else_ = []
+ token = next(self.stream)
if token.test("name:elif"):
- node = nodes.If(lineno=self.stream.current.lineno)
- result.elif_.append(node)
- continue
+ node = nodes.If(lineno=self.stream.current.lineno)
+ result.elif_.append(node)
+ continue
elif token.test("name:else"):
result.else_ = self.parse_statements(("name:endif",), drop_needle=True)
- break
- return result
-
- def parse_with(self):
- node = nodes.With(lineno=next(self.stream).lineno)
- targets = []
- values = []
+ break
+ return result
+
+ def parse_with(self):
+ node = nodes.With(lineno=next(self.stream).lineno)
+ targets = []
+ values = []
while self.stream.current.type != "block_end":
- if targets:
+ if targets:
self.stream.expect("comma")
- target = self.parse_assign_target()
+ target = self.parse_assign_target()
target.set_ctx("param")
- targets.append(target)
+ targets.append(target)
self.stream.expect("assign")
- values.append(self.parse_expression())
- node.targets = targets
- node.values = values
+ values.append(self.parse_expression())
+ node.targets = targets
+ node.values = values
node.body = self.parse_statements(("name:endwith",), drop_needle=True)
- return node
-
- def parse_autoescape(self):
- node = nodes.ScopedEvalContextModifier(lineno=next(self.stream).lineno)
+ return node
+
+ def parse_autoescape(self):
+ node = nodes.ScopedEvalContextModifier(lineno=next(self.stream).lineno)
node.options = [nodes.Keyword("autoescape", self.parse_expression())]
node.body = self.parse_statements(("name:endautoescape",), drop_needle=True)
- return nodes.Scope([node])
-
- def parse_block(self):
- node = nodes.Block(lineno=next(self.stream).lineno)
+ return nodes.Scope([node])
+
+ def parse_block(self):
+ node = nodes.Block(lineno=next(self.stream).lineno)
node.name = self.stream.expect("name").value
node.scoped = self.stream.skip_if("name:scoped")
-
- # common problem people encounter when switching from django
- # to jinja. we do not support hyphens in block names, so let's
- # raise a nicer error message in that case.
+
+ # common problem people encounter when switching from django
+ # to jinja. we do not support hyphens in block names, so let's
+ # raise a nicer error message in that case.
if self.stream.current.type == "sub":
self.fail(
"Block names in Jinja have to be valid Python "
"identifiers and may not contain hyphens, use an "
"underscore instead."
)
-
+
node.body = self.parse_statements(("name:endblock",), drop_needle=True)
self.stream.skip_if("name:" + node.name)
- return node
-
- def parse_extends(self):
- node = nodes.Extends(lineno=next(self.stream).lineno)
- node.template = self.parse_expression()
- return node
-
- def parse_import_context(self, node, default):
+ return node
+
+ def parse_extends(self):
+ node = nodes.Extends(lineno=next(self.stream).lineno)
+ node.template = self.parse_expression()
+ return node
+
+ def parse_import_context(self, node, default):
if self.stream.current.test_any(
"name:with", "name:without"
) and self.stream.look().test("name:context"):
node.with_context = next(self.stream).value == "with"
- self.stream.skip()
- else:
- node.with_context = default
- return node
-
- def parse_include(self):
- node = nodes.Include(lineno=next(self.stream).lineno)
- node.template = self.parse_expression()
+ self.stream.skip()
+ else:
+ node.with_context = default
+ return node
+
+ def parse_include(self):
+ node = nodes.Include(lineno=next(self.stream).lineno)
+ node.template = self.parse_expression()
if self.stream.current.test("name:ignore") and self.stream.look().test(
"name:missing"
):
- node.ignore_missing = True
- self.stream.skip(2)
- else:
- node.ignore_missing = False
- return self.parse_import_context(node, True)
-
- def parse_import(self):
- node = nodes.Import(lineno=next(self.stream).lineno)
- node.template = self.parse_expression()
+ node.ignore_missing = True
+ self.stream.skip(2)
+ else:
+ node.ignore_missing = False
+ return self.parse_import_context(node, True)
+
+ def parse_import(self):
+ node = nodes.Import(lineno=next(self.stream).lineno)
+ node.template = self.parse_expression()
self.stream.expect("name:as")
- node.target = self.parse_assign_target(name_only=True).name
- return self.parse_import_context(node, False)
-
- def parse_from(self):
- node = nodes.FromImport(lineno=next(self.stream).lineno)
- node.template = self.parse_expression()
+ node.target = self.parse_assign_target(name_only=True).name
+ return self.parse_import_context(node, False)
+
+ def parse_from(self):
+ node = nodes.FromImport(lineno=next(self.stream).lineno)
+ node.template = self.parse_expression()
self.stream.expect("name:import")
- node.names = []
-
- def parse_context():
+ node.names = []
+
+ def parse_context():
if self.stream.current.value in (
"with",
"without",
) and self.stream.look().test("name:context"):
node.with_context = next(self.stream).value == "with"
- self.stream.skip()
- return True
- return False
-
- while 1:
- if node.names:
+ self.stream.skip()
+ return True
+ return False
+
+ while 1:
+ if node.names:
self.stream.expect("comma")
if self.stream.current.type == "name":
- if parse_context():
- break
- target = self.parse_assign_target(name_only=True)
+ if parse_context():
+ break
+ target = self.parse_assign_target(name_only=True)
if target.name.startswith("_"):
self.fail(
"names starting with an underline can not be imported",
@@ -338,70 +338,70 @@ class Parser(object):
exc=TemplateAssertionError,
)
if self.stream.skip_if("name:as"):
- alias = self.parse_assign_target(name_only=True)
- node.names.append((target.name, alias.name))
- else:
- node.names.append(target.name)
+ alias = self.parse_assign_target(name_only=True)
+ node.names.append((target.name, alias.name))
+ else:
+ node.names.append(target.name)
if parse_context() or self.stream.current.type != "comma":
- break
- else:
+ break
+ else:
self.stream.expect("name")
if not hasattr(node, "with_context"):
- node.with_context = False
- return node
-
- def parse_signature(self, node):
- node.args = args = []
- node.defaults = defaults = []
+ node.with_context = False
+ return node
+
+ def parse_signature(self, node):
+ node.args = args = []
+ node.defaults = defaults = []
self.stream.expect("lparen")
while self.stream.current.type != "rparen":
- if args:
+ if args:
self.stream.expect("comma")
- arg = self.parse_assign_target(name_only=True)
+ arg = self.parse_assign_target(name_only=True)
arg.set_ctx("param")
if self.stream.skip_if("assign"):
- defaults.append(self.parse_expression())
- elif defaults:
+ defaults.append(self.parse_expression())
+ elif defaults:
self.fail("non-default argument follows default argument")
- args.append(arg)
+ args.append(arg)
self.stream.expect("rparen")
-
- def parse_call_block(self):
- node = nodes.CallBlock(lineno=next(self.stream).lineno)
+
+ def parse_call_block(self):
+ node = nodes.CallBlock(lineno=next(self.stream).lineno)
if self.stream.current.type == "lparen":
- self.parse_signature(node)
- else:
- node.args = []
- node.defaults = []
-
- node.call = self.parse_expression()
- if not isinstance(node.call, nodes.Call):
+ self.parse_signature(node)
+ else:
+ node.args = []
+ node.defaults = []
+
+ node.call = self.parse_expression()
+ if not isinstance(node.call, nodes.Call):
self.fail("expected call", node.lineno)
node.body = self.parse_statements(("name:endcall",), drop_needle=True)
- return node
-
- def parse_filter_block(self):
- node = nodes.FilterBlock(lineno=next(self.stream).lineno)
- node.filter = self.parse_filter(None, start_inline=True)
+ return node
+
+ def parse_filter_block(self):
+ node = nodes.FilterBlock(lineno=next(self.stream).lineno)
+ node.filter = self.parse_filter(None, start_inline=True)
node.body = self.parse_statements(("name:endfilter",), drop_needle=True)
- return node
-
- def parse_macro(self):
- node = nodes.Macro(lineno=next(self.stream).lineno)
- node.name = self.parse_assign_target(name_only=True).name
- self.parse_signature(node)
+ return node
+
+ def parse_macro(self):
+ node = nodes.Macro(lineno=next(self.stream).lineno)
+ node.name = self.parse_assign_target(name_only=True).name
+ self.parse_signature(node)
node.body = self.parse_statements(("name:endmacro",), drop_needle=True)
- return node
-
- def parse_print(self):
- node = nodes.Output(lineno=next(self.stream).lineno)
- node.nodes = []
+ return node
+
+ def parse_print(self):
+ node = nodes.Output(lineno=next(self.stream).lineno)
+ node.nodes = []
while self.stream.current.type != "block_end":
- if node.nodes:
+ if node.nodes:
self.stream.expect("comma")
- node.nodes.append(self.parse_expression())
- return node
-
+ node.nodes.append(self.parse_expression())
+ return node
+
def parse_assign_target(
self,
with_tuple=True,
@@ -410,195 +410,195 @@ class Parser(object):
with_namespace=False,
):
"""Parse an assignment target. As Jinja allows assignments to
- tuples, this function can parse all allowed assignment targets. Per
-    default assignments to tuples are parsed; that can be disabled, however,
- by setting `with_tuple` to `False`. If only assignments to names are
- wanted `name_only` can be set to `True`. The `extra_end_rules`
- parameter is forwarded to the tuple parsing function. If
- `with_namespace` is enabled, a namespace assignment may be parsed.
- """
+ tuples, this function can parse all allowed assignment targets. Per
+    default assignments to tuples are parsed; that can be disabled, however,
+ by setting `with_tuple` to `False`. If only assignments to names are
+ wanted `name_only` can be set to `True`. The `extra_end_rules`
+ parameter is forwarded to the tuple parsing function. If
+ `with_namespace` is enabled, a namespace assignment may be parsed.
+ """
if with_namespace and self.stream.look().type == "dot":
token = self.stream.expect("name")
- next(self.stream) # dot
+ next(self.stream) # dot
attr = self.stream.expect("name")
- target = nodes.NSRef(token.value, attr.value, lineno=token.lineno)
- elif name_only:
+ target = nodes.NSRef(token.value, attr.value, lineno=token.lineno)
+ elif name_only:
token = self.stream.expect("name")
target = nodes.Name(token.value, "store", lineno=token.lineno)
- else:
- if with_tuple:
+ else:
+ if with_tuple:
target = self.parse_tuple(
simplified=True, extra_end_rules=extra_end_rules
)
- else:
- target = self.parse_primary()
+ else:
+ target = self.parse_primary()
target.set_ctx("store")
- if not target.can_assign():
+ if not target.can_assign():
self.fail(
"can't assign to %r" % target.__class__.__name__.lower(), target.lineno
)
- return target
-
- def parse_expression(self, with_condexpr=True):
- """Parse an expression. Per default all expressions are parsed, if
- the optional `with_condexpr` parameter is set to `False` conditional
- expressions are not parsed.
- """
- if with_condexpr:
- return self.parse_condexpr()
- return self.parse_or()
-
- def parse_condexpr(self):
- lineno = self.stream.current.lineno
- expr1 = self.parse_or()
+ return target
+
+ def parse_expression(self, with_condexpr=True):
+ """Parse an expression. Per default all expressions are parsed, if
+ the optional `with_condexpr` parameter is set to `False` conditional
+ expressions are not parsed.
+ """
+ if with_condexpr:
+ return self.parse_condexpr()
+ return self.parse_or()
+
+ def parse_condexpr(self):
+ lineno = self.stream.current.lineno
+ expr1 = self.parse_or()
while self.stream.skip_if("name:if"):
- expr2 = self.parse_or()
+ expr2 = self.parse_or()
if self.stream.skip_if("name:else"):
- expr3 = self.parse_condexpr()
- else:
- expr3 = None
- expr1 = nodes.CondExpr(expr2, expr1, expr3, lineno=lineno)
- lineno = self.stream.current.lineno
- return expr1
-
- def parse_or(self):
- lineno = self.stream.current.lineno
- left = self.parse_and()
+ expr3 = self.parse_condexpr()
+ else:
+ expr3 = None
+ expr1 = nodes.CondExpr(expr2, expr1, expr3, lineno=lineno)
+ lineno = self.stream.current.lineno
+ return expr1
+
+ def parse_or(self):
+ lineno = self.stream.current.lineno
+ left = self.parse_and()
while self.stream.skip_if("name:or"):
- right = self.parse_and()
- left = nodes.Or(left, right, lineno=lineno)
- lineno = self.stream.current.lineno
- return left
-
- def parse_and(self):
- lineno = self.stream.current.lineno
- left = self.parse_not()
+ right = self.parse_and()
+ left = nodes.Or(left, right, lineno=lineno)
+ lineno = self.stream.current.lineno
+ return left
+
+ def parse_and(self):
+ lineno = self.stream.current.lineno
+ left = self.parse_not()
while self.stream.skip_if("name:and"):
- right = self.parse_not()
- left = nodes.And(left, right, lineno=lineno)
- lineno = self.stream.current.lineno
- return left
-
- def parse_not(self):
+ right = self.parse_not()
+ left = nodes.And(left, right, lineno=lineno)
+ lineno = self.stream.current.lineno
+ return left
+
+ def parse_not(self):
if self.stream.current.test("name:not"):
- lineno = next(self.stream).lineno
- return nodes.Not(self.parse_not(), lineno=lineno)
- return self.parse_compare()
-
- def parse_compare(self):
- lineno = self.stream.current.lineno
- expr = self.parse_math1()
- ops = []
- while 1:
- token_type = self.stream.current.type
- if token_type in _compare_operators:
- next(self.stream)
- ops.append(nodes.Operand(token_type, self.parse_math1()))
+ lineno = next(self.stream).lineno
+ return nodes.Not(self.parse_not(), lineno=lineno)
+ return self.parse_compare()
+
+ def parse_compare(self):
+ lineno = self.stream.current.lineno
+ expr = self.parse_math1()
+ ops = []
+ while 1:
+ token_type = self.stream.current.type
+ if token_type in _compare_operators:
+ next(self.stream)
+ ops.append(nodes.Operand(token_type, self.parse_math1()))
elif self.stream.skip_if("name:in"):
ops.append(nodes.Operand("in", self.parse_math1()))
elif self.stream.current.test("name:not") and self.stream.look().test(
"name:in"
):
- self.stream.skip(2)
+ self.stream.skip(2)
ops.append(nodes.Operand("notin", self.parse_math1()))
- else:
- break
- lineno = self.stream.current.lineno
- if not ops:
- return expr
- return nodes.Compare(expr, ops, lineno=lineno)
-
- def parse_math1(self):
- lineno = self.stream.current.lineno
- left = self.parse_concat()
+ else:
+ break
+ lineno = self.stream.current.lineno
+ if not ops:
+ return expr
+ return nodes.Compare(expr, ops, lineno=lineno)
+
+ def parse_math1(self):
+ lineno = self.stream.current.lineno
+ left = self.parse_concat()
while self.stream.current.type in ("add", "sub"):
- cls = _math_nodes[self.stream.current.type]
- next(self.stream)
- right = self.parse_concat()
- left = cls(left, right, lineno=lineno)
- lineno = self.stream.current.lineno
- return left
-
- def parse_concat(self):
- lineno = self.stream.current.lineno
- args = [self.parse_math2()]
+ cls = _math_nodes[self.stream.current.type]
+ next(self.stream)
+ right = self.parse_concat()
+ left = cls(left, right, lineno=lineno)
+ lineno = self.stream.current.lineno
+ return left
+
+ def parse_concat(self):
+ lineno = self.stream.current.lineno
+ args = [self.parse_math2()]
while self.stream.current.type == "tilde":
- next(self.stream)
- args.append(self.parse_math2())
- if len(args) == 1:
- return args[0]
- return nodes.Concat(args, lineno=lineno)
-
- def parse_math2(self):
- lineno = self.stream.current.lineno
- left = self.parse_pow()
+ next(self.stream)
+ args.append(self.parse_math2())
+ if len(args) == 1:
+ return args[0]
+ return nodes.Concat(args, lineno=lineno)
+
+ def parse_math2(self):
+ lineno = self.stream.current.lineno
+ left = self.parse_pow()
while self.stream.current.type in ("mul", "div", "floordiv", "mod"):
- cls = _math_nodes[self.stream.current.type]
- next(self.stream)
- right = self.parse_pow()
- left = cls(left, right, lineno=lineno)
- lineno = self.stream.current.lineno
- return left
-
- def parse_pow(self):
- lineno = self.stream.current.lineno
- left = self.parse_unary()
+ cls = _math_nodes[self.stream.current.type]
+ next(self.stream)
+ right = self.parse_pow()
+ left = cls(left, right, lineno=lineno)
+ lineno = self.stream.current.lineno
+ return left
+
+ def parse_pow(self):
+ lineno = self.stream.current.lineno
+ left = self.parse_unary()
while self.stream.current.type == "pow":
- next(self.stream)
- right = self.parse_unary()
- left = nodes.Pow(left, right, lineno=lineno)
- lineno = self.stream.current.lineno
- return left
-
- def parse_unary(self, with_filter=True):
- token_type = self.stream.current.type
- lineno = self.stream.current.lineno
+ next(self.stream)
+ right = self.parse_unary()
+ left = nodes.Pow(left, right, lineno=lineno)
+ lineno = self.stream.current.lineno
+ return left
+
+ def parse_unary(self, with_filter=True):
+ token_type = self.stream.current.type
+ lineno = self.stream.current.lineno
if token_type == "sub":
- next(self.stream)
- node = nodes.Neg(self.parse_unary(False), lineno=lineno)
+ next(self.stream)
+ node = nodes.Neg(self.parse_unary(False), lineno=lineno)
elif token_type == "add":
- next(self.stream)
- node = nodes.Pos(self.parse_unary(False), lineno=lineno)
- else:
- node = self.parse_primary()
- node = self.parse_postfix(node)
- if with_filter:
- node = self.parse_filter_expr(node)
- return node
-
- def parse_primary(self):
- token = self.stream.current
+ next(self.stream)
+ node = nodes.Pos(self.parse_unary(False), lineno=lineno)
+ else:
+ node = self.parse_primary()
+ node = self.parse_postfix(node)
+ if with_filter:
+ node = self.parse_filter_expr(node)
+ return node
+
+ def parse_primary(self):
+ token = self.stream.current
if token.type == "name":
if token.value in ("true", "false", "True", "False"):
node = nodes.Const(token.value in ("true", "True"), lineno=token.lineno)
elif token.value in ("none", "None"):
- node = nodes.Const(None, lineno=token.lineno)
- else:
+ node = nodes.Const(None, lineno=token.lineno)
+ else:
node = nodes.Name(token.value, "load", lineno=token.lineno)
- next(self.stream)
+ next(self.stream)
elif token.type == "string":
- next(self.stream)
- buf = [token.value]
- lineno = token.lineno
+ next(self.stream)
+ buf = [token.value]
+ lineno = token.lineno
while self.stream.current.type == "string":
- buf.append(self.stream.current.value)
- next(self.stream)
+ buf.append(self.stream.current.value)
+ next(self.stream)
node = nodes.Const("".join(buf), lineno=lineno)
elif token.type in ("integer", "float"):
- next(self.stream)
- node = nodes.Const(token.value, lineno=token.lineno)
+ next(self.stream)
+ node = nodes.Const(token.value, lineno=token.lineno)
elif token.type == "lparen":
- next(self.stream)
- node = self.parse_tuple(explicit_parentheses=True)
+ next(self.stream)
+ node = self.parse_tuple(explicit_parentheses=True)
self.stream.expect("rparen")
elif token.type == "lbracket":
- node = self.parse_list()
+ node = self.parse_list()
elif token.type == "lbrace":
- node = self.parse_dict()
- else:
- self.fail("unexpected '%s'" % describe_token(token), token.lineno)
- return node
-
+ node = self.parse_dict()
+ else:
+ self.fail("unexpected '%s'" % describe_token(token), token.lineno)
+ return node
+
def parse_tuple(
self,
simplified=False,
@@ -606,261 +606,261 @@ class Parser(object):
extra_end_rules=None,
explicit_parentheses=False,
):
- """Works like `parse_expression` but if multiple expressions are
- delimited by a comma a :class:`~jinja2.nodes.Tuple` node is created.
- This method could also return a regular expression instead of a tuple
- if no commas were found.
-
- The default parsing mode is a full tuple. If `simplified` is `True`
- only names and literals are parsed. The `with_condexpr` parameter is
- forwarded to :meth:`parse_expression`.
-
- Because tuples do not require delimiters and may end in a bogus comma
- an extra hint is needed that marks the end of a tuple. For example
- for loops support tuples between `for` and `in`. In that case the
- `extra_end_rules` is set to ``['name:in']``.
-
- `explicit_parentheses` is true if the parsing was triggered by an
- expression in parentheses. This is used to figure out if an empty
- tuple is a valid expression or not.
- """
- lineno = self.stream.current.lineno
- if simplified:
- parse = self.parse_primary
- elif with_condexpr:
- parse = self.parse_expression
- else:
+ """Works like `parse_expression` but if multiple expressions are
+ delimited by a comma a :class:`~jinja2.nodes.Tuple` node is created.
+ This method could also return a regular expression instead of a tuple
+ if no commas were found.
+
+ The default parsing mode is a full tuple. If `simplified` is `True`
+ only names and literals are parsed. The `with_condexpr` parameter is
+ forwarded to :meth:`parse_expression`.
+
+ Because tuples do not require delimiters and may end in a bogus comma
+ an extra hint is needed that marks the end of a tuple. For example
+ for loops support tuples between `for` and `in`. In that case the
+ `extra_end_rules` is set to ``['name:in']``.
+
+ `explicit_parentheses` is true if the parsing was triggered by an
+ expression in parentheses. This is used to figure out if an empty
+ tuple is a valid expression or not.
+ """
+ lineno = self.stream.current.lineno
+ if simplified:
+ parse = self.parse_primary
+ elif with_condexpr:
+ parse = self.parse_expression
+ else:
def parse():
return self.parse_expression(with_condexpr=False)
- args = []
- is_tuple = False
- while 1:
- if args:
+ args = []
+ is_tuple = False
+ while 1:
+ if args:
self.stream.expect("comma")
- if self.is_tuple_end(extra_end_rules):
- break
- args.append(parse())
+ if self.is_tuple_end(extra_end_rules):
+ break
+ args.append(parse())
if self.stream.current.type == "comma":
- is_tuple = True
- else:
- break
- lineno = self.stream.current.lineno
-
- if not is_tuple:
- if args:
- return args[0]
-
- # if we don't have explicit parentheses, an empty tuple is
- # not a valid expression. This would mean nothing (literally
- # nothing) in the spot of an expression would be an empty
- # tuple.
- if not explicit_parentheses:
+ is_tuple = True
+ else:
+ break
+ lineno = self.stream.current.lineno
+
+ if not is_tuple:
+ if args:
+ return args[0]
+
+ # if we don't have explicit parentheses, an empty tuple is
+ # not a valid expression. This would mean nothing (literally
+ # nothing) in the spot of an expression would be an empty
+ # tuple.
+ if not explicit_parentheses:
self.fail(
"Expected an expression, got '%s'"
% describe_token(self.stream.current)
)
-
+
return nodes.Tuple(args, "load", lineno=lineno)
-
- def parse_list(self):
+
+ def parse_list(self):
token = self.stream.expect("lbracket")
- items = []
+ items = []
while self.stream.current.type != "rbracket":
- if items:
+ if items:
self.stream.expect("comma")
if self.stream.current.type == "rbracket":
- break
- items.append(self.parse_expression())
+ break
+ items.append(self.parse_expression())
self.stream.expect("rbracket")
- return nodes.List(items, lineno=token.lineno)
-
- def parse_dict(self):
+ return nodes.List(items, lineno=token.lineno)
+
+ def parse_dict(self):
token = self.stream.expect("lbrace")
- items = []
+ items = []
while self.stream.current.type != "rbrace":
- if items:
+ if items:
self.stream.expect("comma")
if self.stream.current.type == "rbrace":
- break
- key = self.parse_expression()
+ break
+ key = self.parse_expression()
self.stream.expect("colon")
- value = self.parse_expression()
- items.append(nodes.Pair(key, value, lineno=key.lineno))
+ value = self.parse_expression()
+ items.append(nodes.Pair(key, value, lineno=key.lineno))
self.stream.expect("rbrace")
- return nodes.Dict(items, lineno=token.lineno)
-
- def parse_postfix(self, node):
- while 1:
- token_type = self.stream.current.type
+ return nodes.Dict(items, lineno=token.lineno)
+
+ def parse_postfix(self, node):
+ while 1:
+ token_type = self.stream.current.type
if token_type == "dot" or token_type == "lbracket":
- node = self.parse_subscript(node)
- # calls are valid both after postfix expressions (getattr
- # and getitem) as well as filters and tests
+ node = self.parse_subscript(node)
+ # calls are valid both after postfix expressions (getattr
+ # and getitem) as well as filters and tests
elif token_type == "lparen":
- node = self.parse_call(node)
- else:
- break
- return node
-
- def parse_filter_expr(self, node):
- while 1:
- token_type = self.stream.current.type
+ node = self.parse_call(node)
+ else:
+ break
+ return node
+
+ def parse_filter_expr(self, node):
+ while 1:
+ token_type = self.stream.current.type
if token_type == "pipe":
- node = self.parse_filter(node)
+ node = self.parse_filter(node)
elif token_type == "name" and self.stream.current.value == "is":
- node = self.parse_test(node)
- # calls are valid both after postfix expressions (getattr
- # and getitem) as well as filters and tests
+ node = self.parse_test(node)
+ # calls are valid both after postfix expressions (getattr
+ # and getitem) as well as filters and tests
elif token_type == "lparen":
- node = self.parse_call(node)
- else:
- break
- return node
-
- def parse_subscript(self, node):
- token = next(self.stream)
+ node = self.parse_call(node)
+ else:
+ break
+ return node
+
+ def parse_subscript(self, node):
+ token = next(self.stream)
if token.type == "dot":
- attr_token = self.stream.current
- next(self.stream)
+ attr_token = self.stream.current
+ next(self.stream)
if attr_token.type == "name":
return nodes.Getattr(
node, attr_token.value, "load", lineno=token.lineno
)
elif attr_token.type != "integer":
self.fail("expected name or number", attr_token.lineno)
- arg = nodes.Const(attr_token.value, lineno=attr_token.lineno)
+ arg = nodes.Const(attr_token.value, lineno=attr_token.lineno)
return nodes.Getitem(node, arg, "load", lineno=token.lineno)
if token.type == "lbracket":
- args = []
+ args = []
while self.stream.current.type != "rbracket":
- if args:
+ if args:
self.stream.expect("comma")
- args.append(self.parse_subscribed())
+ args.append(self.parse_subscribed())
self.stream.expect("rbracket")
- if len(args) == 1:
- arg = args[0]
- else:
+ if len(args) == 1:
+ arg = args[0]
+ else:
arg = nodes.Tuple(args, "load", lineno=token.lineno)
return nodes.Getitem(node, arg, "load", lineno=token.lineno)
self.fail("expected subscript expression", token.lineno)
-
- def parse_subscribed(self):
- lineno = self.stream.current.lineno
-
+
+ def parse_subscribed(self):
+ lineno = self.stream.current.lineno
+
if self.stream.current.type == "colon":
- next(self.stream)
- args = [None]
- else:
- node = self.parse_expression()
+ next(self.stream)
+ args = [None]
+ else:
+ node = self.parse_expression()
if self.stream.current.type != "colon":
- return node
- next(self.stream)
- args = [node]
-
+ return node
+ next(self.stream)
+ args = [node]
+
if self.stream.current.type == "colon":
- args.append(None)
+ args.append(None)
elif self.stream.current.type not in ("rbracket", "comma"):
- args.append(self.parse_expression())
- else:
- args.append(None)
-
+ args.append(self.parse_expression())
+ else:
+ args.append(None)
+
if self.stream.current.type == "colon":
- next(self.stream)
+ next(self.stream)
if self.stream.current.type not in ("rbracket", "comma"):
- args.append(self.parse_expression())
- else:
- args.append(None)
- else:
- args.append(None)
-
- return nodes.Slice(lineno=lineno, *args)
-
- def parse_call(self, node):
+ args.append(self.parse_expression())
+ else:
+ args.append(None)
+ else:
+ args.append(None)
+
+ return nodes.Slice(lineno=lineno, *args)
+
+ def parse_call(self, node):
token = self.stream.expect("lparen")
- args = []
- kwargs = []
- dyn_args = dyn_kwargs = None
- require_comma = False
-
- def ensure(expr):
- if not expr:
+ args = []
+ kwargs = []
+ dyn_args = dyn_kwargs = None
+ require_comma = False
+
+ def ensure(expr):
+ if not expr:
self.fail("invalid syntax for function call expression", token.lineno)
-
+
while self.stream.current.type != "rparen":
- if require_comma:
+ if require_comma:
self.stream.expect("comma")
- # support for trailing comma
+ # support for trailing comma
if self.stream.current.type == "rparen":
- break
+ break
if self.stream.current.type == "mul":
- ensure(dyn_args is None and dyn_kwargs is None)
- next(self.stream)
- dyn_args = self.parse_expression()
+ ensure(dyn_args is None and dyn_kwargs is None)
+ next(self.stream)
+ dyn_args = self.parse_expression()
elif self.stream.current.type == "pow":
- ensure(dyn_kwargs is None)
- next(self.stream)
- dyn_kwargs = self.parse_expression()
- else:
+ ensure(dyn_kwargs is None)
+ next(self.stream)
+ dyn_kwargs = self.parse_expression()
+ else:
if (
self.stream.current.type == "name"
and self.stream.look().type == "assign"
):
# Parsing a kwarg
ensure(dyn_kwargs is None)
- key = self.stream.current.value
- self.stream.skip(2)
- value = self.parse_expression()
+ key = self.stream.current.value
+ self.stream.skip(2)
+ value = self.parse_expression()
kwargs.append(nodes.Keyword(key, value, lineno=value.lineno))
- else:
+ else:
# Parsing an arg
ensure(dyn_args is None and dyn_kwargs is None and not kwargs)
- args.append(self.parse_expression())
-
- require_comma = True
+ args.append(self.parse_expression())
+
+ require_comma = True
self.stream.expect("rparen")
-
- if node is None:
- return args, kwargs, dyn_args, dyn_kwargs
+
+ if node is None:
+ return args, kwargs, dyn_args, dyn_kwargs
return nodes.Call(node, args, kwargs, dyn_args, dyn_kwargs, lineno=token.lineno)
-
- def parse_filter(self, node, start_inline=False):
+
+ def parse_filter(self, node, start_inline=False):
while self.stream.current.type == "pipe" or start_inline:
- if not start_inline:
- next(self.stream)
+ if not start_inline:
+ next(self.stream)
token = self.stream.expect("name")
- name = token.value
+ name = token.value
while self.stream.current.type == "dot":
- next(self.stream)
+ next(self.stream)
name += "." + self.stream.expect("name").value
if self.stream.current.type == "lparen":
- args, kwargs, dyn_args, dyn_kwargs = self.parse_call(None)
- else:
- args = []
- kwargs = []
- dyn_args = dyn_kwargs = None
+ args, kwargs, dyn_args, dyn_kwargs = self.parse_call(None)
+ else:
+ args = []
+ kwargs = []
+ dyn_args = dyn_kwargs = None
node = nodes.Filter(
node, name, args, kwargs, dyn_args, dyn_kwargs, lineno=token.lineno
)
- start_inline = False
- return node
-
- def parse_test(self, node):
- token = next(self.stream)
+ start_inline = False
+ return node
+
+ def parse_test(self, node):
+ token = next(self.stream)
if self.stream.current.test("name:not"):
- next(self.stream)
- negated = True
- else:
- negated = False
+ next(self.stream)
+ negated = True
+ else:
+ negated = False
name = self.stream.expect("name").value
while self.stream.current.type == "dot":
- next(self.stream)
+ next(self.stream)
name += "." + self.stream.expect("name").value
- dyn_args = dyn_kwargs = None
- kwargs = []
+ dyn_args = dyn_kwargs = None
+ kwargs = []
if self.stream.current.type == "lparen":
- args, kwargs, dyn_args, dyn_kwargs = self.parse_call(None)
+ args, kwargs, dyn_args, dyn_kwargs = self.parse_call(None)
elif self.stream.current.type in (
"name",
"string",
@@ -875,65 +875,65 @@ class Parser(object):
arg_node = self.parse_primary()
arg_node = self.parse_postfix(arg_node)
args = [arg_node]
- else:
- args = []
+ else:
+ args = []
node = nodes.Test(
node, name, args, kwargs, dyn_args, dyn_kwargs, lineno=token.lineno
)
- if negated:
- node = nodes.Not(node, lineno=token.lineno)
- return node
-
- def subparse(self, end_tokens=None):
- body = []
- data_buffer = []
- add_data = data_buffer.append
-
- if end_tokens is not None:
- self._end_token_stack.append(end_tokens)
-
- def flush_data():
- if data_buffer:
- lineno = data_buffer[0].lineno
- body.append(nodes.Output(data_buffer[:], lineno=lineno))
- del data_buffer[:]
-
- try:
- while self.stream:
- token = self.stream.current
+ if negated:
+ node = nodes.Not(node, lineno=token.lineno)
+ return node
+
+ def subparse(self, end_tokens=None):
+ body = []
+ data_buffer = []
+ add_data = data_buffer.append
+
+ if end_tokens is not None:
+ self._end_token_stack.append(end_tokens)
+
+ def flush_data():
+ if data_buffer:
+ lineno = data_buffer[0].lineno
+ body.append(nodes.Output(data_buffer[:], lineno=lineno))
+ del data_buffer[:]
+
+ try:
+ while self.stream:
+ token = self.stream.current
if token.type == "data":
- if token.value:
+ if token.value:
add_data(nodes.TemplateData(token.value, lineno=token.lineno))
- next(self.stream)
+ next(self.stream)
elif token.type == "variable_begin":
- next(self.stream)
- add_data(self.parse_tuple(with_condexpr=True))
+ next(self.stream)
+ add_data(self.parse_tuple(with_condexpr=True))
self.stream.expect("variable_end")
elif token.type == "block_begin":
- flush_data()
- next(self.stream)
+ flush_data()
+ next(self.stream)
if end_tokens is not None and self.stream.current.test_any(
*end_tokens
):
- return body
- rv = self.parse_statement()
- if isinstance(rv, list):
- body.extend(rv)
- else:
- body.append(rv)
+ return body
+ rv = self.parse_statement()
+ if isinstance(rv, list):
+ body.extend(rv)
+ else:
+ body.append(rv)
self.stream.expect("block_end")
- else:
+ else:
raise AssertionError("internal parsing error")
-
- flush_data()
- finally:
- if end_tokens is not None:
- self._end_token_stack.pop()
-
- return body
-
- def parse(self):
- """Parse the whole template into a `Template` node."""
- result = nodes.Template(self.subparse(), lineno=1)
- result.set_environment(self.environment)
- return result
+
+ flush_data()
+ finally:
+ if end_tokens is not None:
+ self._end_token_stack.pop()
+
+ return body
+
+ def parse(self):
+ """Parse the whole template into a `Template` node."""
+ result = nodes.Template(self.subparse(), lineno=1)
+ result.set_environment(self.environment)
+ return result
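
The expression-parsing methods above build Jinja's AST node by node (Compare, Concat, Filter, Test, Tuple, and so on). As an illustrative sketch only, not part of this change, the same grammar can be exercised through the public Environment.parse() entry point and the resulting nodes inspected:

    from jinja2 import Environment
    from jinja2 import nodes

    env = Environment()
    # A conditional expression whose value uses a parenthesised tuple, a
    # filter call and an "is defined" test -- all constructed by the parser
    # methods shown in this file.
    ast = env.parse("{{ (a, b) | join(', ') if a is defined else 'n/a' }}")

    output = ast.body[0]  # nodes.Output wrapping the parsed expression
    assert isinstance(output, nodes.Output)
    print(type(output.nodes[0]).__name__)  # CondExpr
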
diff --git a/contrib/python/Jinja2/py2/jinja2/runtime.py b/contrib/python/Jinja2/py2/jinja2/runtime.py
index 3ad7968624..850f82da1c 100644
--- a/contrib/python/Jinja2/py2/jinja2/runtime.py
+++ b/contrib/python/Jinja2/py2/jinja2/runtime.py
@@ -1,13 +1,13 @@
-# -*- coding: utf-8 -*-
+# -*- coding: utf-8 -*-
"""The runtime functions and state used by compiled templates."""
-import sys
-from itertools import chain
-from types import MethodType
-
+import sys
+from itertools import chain
+from types import MethodType
+
from markupsafe import escape # noqa: F401
from markupsafe import Markup
from markupsafe import soft_unicode
-
+
from ._compat import abc
from ._compat import imap
from ._compat import implements_iterator
@@ -27,8 +27,8 @@ from .utils import internalcode
from .utils import missing
from .utils import Namespace # noqa: F401
from .utils import object_type_repr
-
-# these variables are exported to the template runtime
+
+# these variables are exported to the template runtime
exported = [
"LoopContext",
"TemplateReference",
@@ -46,35 +46,35 @@ exported = [
"Namespace",
"Undefined",
]
-
-#: the name of the function that is used to convert something into
-#: a string. We can just use the text type here.
-to_string = text_type
-
-
+
+#: the name of the function that is used to convert something into
+#: a string. We can just use the text type here.
+to_string = text_type
+
+
def identity(x):
"""Returns its argument. Useful for certain things in the
environment.
"""
return x
-
-
-def markup_join(seq):
- """Concatenation that escapes if necessary and converts to unicode."""
- buf = []
- iterator = imap(soft_unicode, seq)
- for arg in iterator:
- buf.append(arg)
+
+
+def markup_join(seq):
+ """Concatenation that escapes if necessary and converts to unicode."""
+ buf = []
+ iterator = imap(soft_unicode, seq)
+ for arg in iterator:
+ buf.append(arg)
if hasattr(arg, "__html__"):
return Markup(u"").join(chain(buf, iterator))
- return concat(buf)
-
-
-def unicode_join(seq):
- """Simple args to unicode conversion and concatenation."""
- return concat(imap(text_type, seq))
-
-
+ return concat(buf)
+
+
+def unicode_join(seq):
+ """Simple args to unicode conversion and concatenation."""
+ return concat(imap(text_type, seq))
+
+
def new_context(
environment,
template_name,
@@ -85,311 +85,311 @@ def new_context(
locals=None,
):
"""Internal helper for context creation."""
- if vars is None:
- vars = {}
- if shared:
- parent = vars
- else:
- parent = dict(globals or (), **vars)
- if locals:
- # if the parent is shared a copy should be created because
- # we don't want to modify the dict passed
- if shared:
- parent = dict(parent)
- for key, value in iteritems(locals):
- if value is not missing:
- parent[key] = value
+ if vars is None:
+ vars = {}
+ if shared:
+ parent = vars
+ else:
+ parent = dict(globals or (), **vars)
+ if locals:
+ # if the parent is shared a copy should be created because
+ # we don't want to modify the dict passed
+ if shared:
+ parent = dict(parent)
+ for key, value in iteritems(locals):
+ if value is not missing:
+ parent[key] = value
return environment.context_class(environment, parent, template_name, blocks)
-
-
-class TemplateReference(object):
- """The `self` in templates."""
-
- def __init__(self, context):
- self.__context = context
-
- def __getitem__(self, name):
- blocks = self.__context.blocks[name]
- return BlockReference(name, self.__context, blocks, 0)
-
- def __repr__(self):
+
+
+class TemplateReference(object):
+ """The `self` in templates."""
+
+ def __init__(self, context):
+ self.__context = context
+
+ def __getitem__(self, name):
+ blocks = self.__context.blocks[name]
+ return BlockReference(name, self.__context, blocks, 0)
+
+ def __repr__(self):
return "<%s %r>" % (self.__class__.__name__, self.__context.name)
-
-
-def _get_func(x):
+
+
+def _get_func(x):
return getattr(x, "__func__", x)
-
-
-class ContextMeta(type):
+
+
+class ContextMeta(type):
def __new__(mcs, name, bases, d):
rv = type.__new__(mcs, name, bases, d)
- if bases == ():
- return rv
-
- resolve = _get_func(rv.resolve)
- default_resolve = _get_func(Context.resolve)
- resolve_or_missing = _get_func(rv.resolve_or_missing)
- default_resolve_or_missing = _get_func(Context.resolve_or_missing)
-
- # If we have a changed resolve but no changed default or missing
- # resolve we invert the call logic.
+ if bases == ():
+ return rv
+
+ resolve = _get_func(rv.resolve)
+ default_resolve = _get_func(Context.resolve)
+ resolve_or_missing = _get_func(rv.resolve_or_missing)
+ default_resolve_or_missing = _get_func(Context.resolve_or_missing)
+
+ # If we have a changed resolve but no changed default or missing
+ # resolve we invert the call logic.
if (
resolve is not default_resolve
and resolve_or_missing is default_resolve_or_missing
):
- rv._legacy_resolve_mode = True
+ rv._legacy_resolve_mode = True
elif (
resolve is default_resolve
and resolve_or_missing is default_resolve_or_missing
):
- rv._fast_resolve_mode = True
-
- return rv
-
-
-def resolve_or_missing(context, key, missing=missing):
- if key in context.vars:
- return context.vars[key]
- if key in context.parent:
- return context.parent[key]
- return missing
-
-
-class Context(with_metaclass(ContextMeta)):
- """The template context holds the variables of a template. It stores the
- values passed to the template and also the names the template exports.
- Creating instances is neither supported nor useful as it's created
- automatically at various stages of the template evaluation and should not
- be created by hand.
-
- The context is immutable. Modifications on :attr:`parent` **must not**
- happen and modifications on :attr:`vars` are allowed from generated
- template code only. Template filters and global functions marked as
- :func:`contextfunction`\\s get the active context passed as first argument
- and are allowed to access the context read-only.
-
- The template context supports read only dict operations (`get`,
- `keys`, `values`, `items`, `iterkeys`, `itervalues`, `iteritems`,
- `__getitem__`, `__contains__`). Additionally there is a :meth:`resolve`
- method that doesn't fail with a `KeyError` but returns an
- :class:`Undefined` object for missing variables.
- """
-
- # XXX: we want to eventually make this be a deprecation warning and
- # remove it.
- _legacy_resolve_mode = False
- _fast_resolve_mode = False
-
- def __init__(self, environment, parent, name, blocks):
- self.parent = parent
- self.vars = {}
- self.environment = environment
- self.eval_ctx = EvalContext(self.environment, name)
- self.exported_vars = set()
- self.name = name
-
- # create the initial mapping of blocks. Whenever template inheritance
- # takes place the runtime will update this mapping with the new blocks
- # from the template.
- self.blocks = dict((k, [v]) for k, v in iteritems(blocks))
-
- # In case we detect the fast resolve mode we can set up an alias
- # here that bypasses the legacy code logic.
- if self._fast_resolve_mode:
- self.resolve_or_missing = MethodType(resolve_or_missing, self)
-
- def super(self, name, current):
- """Render a parent block."""
- try:
- blocks = self.blocks[name]
- index = blocks.index(current) + 1
- blocks[index]
- except LookupError:
+ rv._fast_resolve_mode = True
+
+ return rv
+
+
+def resolve_or_missing(context, key, missing=missing):
+ if key in context.vars:
+ return context.vars[key]
+ if key in context.parent:
+ return context.parent[key]
+ return missing
+
+
+class Context(with_metaclass(ContextMeta)):
+ """The template context holds the variables of a template. It stores the
+ values passed to the template and also the names the template exports.
+ Creating instances is neither supported nor useful as it's created
+ automatically at various stages of the template evaluation and should not
+ be created by hand.
+
+ The context is immutable. Modifications on :attr:`parent` **must not**
+ happen and modifications on :attr:`vars` are allowed from generated
+ template code only. Template filters and global functions marked as
+ :func:`contextfunction`\\s get the active context passed as first argument
+ and are allowed to access the context read-only.
+
+ The template context supports read only dict operations (`get`,
+ `keys`, `values`, `items`, `iterkeys`, `itervalues`, `iteritems`,
+ `__getitem__`, `__contains__`). Additionally there is a :meth:`resolve`
+ method that doesn't fail with a `KeyError` but returns an
+ :class:`Undefined` object for missing variables.
+ """
+
+ # XXX: we want to eventually make this be a deprecation warning and
+ # remove it.
+ _legacy_resolve_mode = False
+ _fast_resolve_mode = False
+
+ def __init__(self, environment, parent, name, blocks):
+ self.parent = parent
+ self.vars = {}
+ self.environment = environment
+ self.eval_ctx = EvalContext(self.environment, name)
+ self.exported_vars = set()
+ self.name = name
+
+ # create the initial mapping of blocks. Whenever template inheritance
+ # takes place the runtime will update this mapping with the new blocks
+ # from the template.
+ self.blocks = dict((k, [v]) for k, v in iteritems(blocks))
+
+ # In case we detect the fast resolve mode we can set up an alias
+ # here that bypasses the legacy code logic.
+ if self._fast_resolve_mode:
+ self.resolve_or_missing = MethodType(resolve_or_missing, self)
+
+ def super(self, name, current):
+ """Render a parent block."""
+ try:
+ blocks = self.blocks[name]
+ index = blocks.index(current) + 1
+ blocks[index]
+ except LookupError:
return self.environment.undefined(
"there is no parent block called %r." % name, name="super"
)
- return BlockReference(name, self, blocks, index)
-
- def get(self, key, default=None):
- """Returns an item from the template context, if it doesn't exist
- `default` is returned.
- """
- try:
- return self[key]
- except KeyError:
- return default
-
- def resolve(self, key):
- """Looks up a variable like `__getitem__` or `get` but returns an
- :class:`Undefined` object with the name of the variable looked up.
- """
- if self._legacy_resolve_mode:
- rv = resolve_or_missing(self, key)
- else:
- rv = self.resolve_or_missing(key)
- if rv is missing:
- return self.environment.undefined(name=key)
- return rv
-
- def resolve_or_missing(self, key):
- """Resolves a variable like :meth:`resolve` but returns the
- special `missing` value if it cannot be found.
- """
- if self._legacy_resolve_mode:
- rv = self.resolve(key)
- if isinstance(rv, Undefined):
- rv = missing
- return rv
- return resolve_or_missing(self, key)
-
- def get_exported(self):
- """Get a new dict with the exported variables."""
- return dict((k, self.vars[k]) for k in self.exported_vars)
-
- def get_all(self):
- """Return the complete context as dict including the exported
- variables. For optimization reasons this might not return an
- actual copy, so be careful when using it.
- """
- if not self.vars:
- return self.parent
- if not self.parent:
- return self.vars
- return dict(self.parent, **self.vars)
-
- @internalcode
+ return BlockReference(name, self, blocks, index)
+
+ def get(self, key, default=None):
+ """Returns an item from the template context, if it doesn't exist
+ `default` is returned.
+ """
+ try:
+ return self[key]
+ except KeyError:
+ return default
+
+ def resolve(self, key):
+ """Looks up a variable like `__getitem__` or `get` but returns an
+ :class:`Undefined` object with the name of the variable looked up.
+ """
+ if self._legacy_resolve_mode:
+ rv = resolve_or_missing(self, key)
+ else:
+ rv = self.resolve_or_missing(key)
+ if rv is missing:
+ return self.environment.undefined(name=key)
+ return rv
+
+ def resolve_or_missing(self, key):
+ """Resolves a variable like :meth:`resolve` but returns the
+ special `missing` value if it cannot be found.
+ """
+ if self._legacy_resolve_mode:
+ rv = self.resolve(key)
+ if isinstance(rv, Undefined):
+ rv = missing
+ return rv
+ return resolve_or_missing(self, key)
+
+ def get_exported(self):
+ """Get a new dict with the exported variables."""
+ return dict((k, self.vars[k]) for k in self.exported_vars)
+
+ def get_all(self):
+ """Return the complete context as dict including the exported
+ variables. For optimization reasons this might not return an
+ actual copy, so be careful when using it.
+ """
+ if not self.vars:
+ return self.parent
+ if not self.parent:
+ return self.vars
+ return dict(self.parent, **self.vars)
+
+ @internalcode
def call(__self, __obj, *args, **kwargs): # noqa: B902
- """Call the callable with the arguments and keyword arguments
- provided but inject the active context or environment as first
- argument if the callable is a :func:`contextfunction` or
- :func:`environmentfunction`.
- """
- if __debug__:
- __traceback_hide__ = True # noqa
-
- # Allow callable classes to take a context
+ """Call the callable with the arguments and keyword arguments
+ provided but inject the active context or environment as first
+ argument if the callable is a :func:`contextfunction` or
+ :func:`environmentfunction`.
+ """
+ if __debug__:
+ __traceback_hide__ = True # noqa
+
+ # Allow callable classes to take a context
if hasattr(__obj, "__call__"): # noqa: B004
- fn = __obj.__call__
+ fn = __obj.__call__
for fn_type in (
"contextfunction",
"evalcontextfunction",
"environmentfunction",
):
- if hasattr(fn, fn_type):
- __obj = fn
- break
-
+ if hasattr(fn, fn_type):
+ __obj = fn
+ break
+
if callable(__obj):
if getattr(__obj, "contextfunction", False) is True:
- args = (__self,) + args
+ args = (__self,) + args
elif getattr(__obj, "evalcontextfunction", False) is True:
- args = (__self.eval_ctx,) + args
+ args = (__self.eval_ctx,) + args
elif getattr(__obj, "environmentfunction", False) is True:
- args = (__self.environment,) + args
- try:
- return __obj(*args, **kwargs)
- except StopIteration:
+ args = (__self.environment,) + args
+ try:
+ return __obj(*args, **kwargs)
+ except StopIteration:
return __self.environment.undefined(
"value was undefined because "
"a callable raised a "
"StopIteration exception"
)
-
- def derived(self, locals=None):
- """Internal helper function to create a derived context. This is
- used in situations where the system needs a new context in the same
- template that is independent.
- """
+
+ def derived(self, locals=None):
+ """Internal helper function to create a derived context. This is
+ used in situations where the system needs a new context in the same
+ template that is independent.
+ """
context = new_context(
self.environment, self.name, {}, self.get_all(), True, None, locals
)
- context.eval_ctx = self.eval_ctx
- context.blocks.update((k, list(v)) for k, v in iteritems(self.blocks))
- return context
-
+ context.eval_ctx = self.eval_ctx
+ context.blocks.update((k, list(v)) for k, v in iteritems(self.blocks))
+ return context
+
def _all(meth): # noqa: B902
def proxy(self):
return getattr(self.get_all(), meth)()
- proxy.__doc__ = getattr(dict, meth).__doc__
- proxy.__name__ = meth
- return proxy
-
+ proxy.__doc__ = getattr(dict, meth).__doc__
+ proxy.__name__ = meth
+ return proxy
+
keys = _all("keys")
values = _all("values")
items = _all("items")
-
- # not available on python 3
- if PY2:
+
+ # not available on python 3
+ if PY2:
iterkeys = _all("iterkeys")
itervalues = _all("itervalues")
iteritems = _all("iteritems")
- del _all
-
- def __contains__(self, name):
- return name in self.vars or name in self.parent
-
- def __getitem__(self, key):
- """Lookup a variable or raise `KeyError` if the variable is
- undefined.
- """
- item = self.resolve_or_missing(key)
- if item is missing:
- raise KeyError(key)
- return item
-
- def __repr__(self):
+ del _all
+
+ def __contains__(self, name):
+ return name in self.vars or name in self.parent
+
+ def __getitem__(self, key):
+ """Lookup a variable or raise `KeyError` if the variable is
+ undefined.
+ """
+ item = self.resolve_or_missing(key)
+ if item is missing:
+ raise KeyError(key)
+ return item
+
+ def __repr__(self):
return "<%s %s of %r>" % (
- self.__class__.__name__,
- repr(self.get_all()),
+ self.__class__.__name__,
+ repr(self.get_all()),
self.name,
- )
-
-
+ )
+
+
abc.Mapping.register(Context)
-
-
-class BlockReference(object):
- """One block on a template reference."""
-
- def __init__(self, name, context, stack, depth):
- self.name = name
- self._context = context
- self._stack = stack
- self._depth = depth
-
- @property
- def super(self):
- """Super the block."""
- if self._depth + 1 >= len(self._stack):
+
+
+class BlockReference(object):
+ """One block on a template reference."""
+
+ def __init__(self, name, context, stack, depth):
+ self.name = name
+ self._context = context
+ self._stack = stack
+ self._depth = depth
+
+ @property
+ def super(self):
+ """Super the block."""
+ if self._depth + 1 >= len(self._stack):
return self._context.environment.undefined(
"there is no parent block called %r." % self.name, name="super"
)
return BlockReference(self.name, self._context, self._stack, self._depth + 1)
-
- @internalcode
- def __call__(self):
- rv = concat(self._stack[self._depth](self._context))
- if self._context.eval_ctx.autoescape:
- rv = Markup(rv)
- return rv
-
-
+
+ @internalcode
+ def __call__(self):
+ rv = concat(self._stack[self._depth](self._context))
+ if self._context.eval_ctx.autoescape:
+ rv = Markup(rv)
+ return rv
+
+
@implements_iterator
class LoopContext:
"""A wrapper iterable for dynamic ``for`` loops, with information
about the loop and iteration.
"""
-
+
#: Current iteration of the loop, starting at 0.
index0 = -1
- _length = None
+ _length = None
_after = missing
_current = missing
_before = missing
_last_changed_value = missing
-
+
def __init__(self, iterable, undefined, recurse=None, depth0=0):
"""
:param iterable: Iterable to wrap.
@@ -401,68 +401,68 @@ class LoopContext:
"""
self._iterable = iterable
self._iterator = self._to_iterator(iterable)
- self._undefined = undefined
- self._recurse = recurse
+ self._undefined = undefined
+ self._recurse = recurse
#: How many levels deep a recursive loop currently is, starting at 0.
- self.depth0 = depth0
-
+ self.depth0 = depth0
+
@staticmethod
def _to_iterator(iterable):
return iter(iterable)
-
+
@property
def length(self):
"""Length of the iterable.
-
+
If the iterable is a generator or otherwise does not have a
size, it is eagerly evaluated to get a size.
"""
if self._length is not None:
return self._length
-
+
try:
self._length = len(self._iterable)
except TypeError:
iterable = list(self._iterator)
self._iterator = self._to_iterator(iterable)
self._length = len(iterable) + self.index + (self._after is not missing)
-
+
return self._length
-
- def __len__(self):
- return self.length
-
+
+ def __len__(self):
+ return self.length
+
@property
def depth(self):
"""How many levels deep a recursive loop currently is, starting at 1."""
return self.depth0 + 1
-
+
@property
def index(self):
"""Current iteration of the loop, starting at 1."""
return self.index0 + 1
-
+
@property
def revindex0(self):
"""Number of iterations from the end of the loop, ending at 0.
-
+
Requires calculating :attr:`length`.
"""
return self.length - self.index
-
+
@property
def revindex(self):
"""Number of iterations from the end of the loop, ending at 1.
-
+
Requires calculating :attr:`length`.
"""
return self.length - self.index0
-
+
@property
def first(self):
"""Whether this is the first iteration of the loop."""
return self.index0 == 0
-
+
def _peek_next(self):
"""Return the next element in the iterable, or :data:`missing`
if the iterable is exhausted. Only peeks one item ahead, caching
@@ -475,16 +475,16 @@ class LoopContext:
self._after = next(self._iterator, missing)
return self._after
- @property
+ @property
def last(self):
"""Whether this is the last iteration of the loop.
-
+
Causes the iterable to advance early. See
:func:`itertools.groupby` for issues this can cause.
The :func:`groupby` filter avoids that issue.
"""
return self._peek_next() is missing
-
+
@property
def previtem(self):
"""The item in the previous iteration. Undefined during the
@@ -492,20 +492,20 @@ class LoopContext:
"""
if self.first:
return self._undefined("there is no previous item")
-
+
return self._before
-
+
@property
def nextitem(self):
"""The item in the next iteration. Undefined during the last
iteration.
-
+
Causes the iterable to advance early. See
:func:`itertools.groupby` for issues this can cause.
The :func:`groupby` filter avoids that issue.
"""
rv = self._peek_next()
-
+
if rv is missing:
return self._undefined("there is no next item")
@@ -534,21 +534,21 @@ class LoopContext:
return False
- def __iter__(self):
- return self
-
- def __next__(self):
+ def __iter__(self):
+ return self
+
+ def __next__(self):
if self._after is not missing:
rv = self._after
self._after = missing
else:
rv = next(self._iterator)
-
+
self.index0 += 1
self._before = self._current
self._current = rv
return rv, self
-
+
@internalcode
def __call__(self, iterable):
"""When iterating over nested data, render the body of the loop
@@ -567,9 +567,9 @@ class LoopContext:
return "<%s %d/%d>" % (self.__class__.__name__, self.index, self.length)
-class Macro(object):
- """Wraps a macro function."""
-
+class Macro(object):
+ """Wraps a macro function."""
+
def __init__(
self,
environment,
@@ -581,79 +581,79 @@ class Macro(object):
caller,
default_autoescape=None,
):
- self._environment = environment
- self._func = func
- self._argument_count = len(arguments)
- self.name = name
- self.arguments = arguments
- self.catch_kwargs = catch_kwargs
- self.catch_varargs = catch_varargs
- self.caller = caller
+ self._environment = environment
+ self._func = func
+ self._argument_count = len(arguments)
+ self.name = name
+ self.arguments = arguments
+ self.catch_kwargs = catch_kwargs
+ self.catch_varargs = catch_varargs
+ self.caller = caller
self.explicit_caller = "caller" in arguments
- if default_autoescape is None:
- default_autoescape = environment.autoescape
- self._default_autoescape = default_autoescape
-
- @internalcode
- @evalcontextfunction
- def __call__(self, *args, **kwargs):
- # This requires a bit of explanation. In the past we used to
- # decide largely based on compile-time information if a macro is
- # safe or unsafe. While there was a volatile mode it was largely
- # unused for deciding on escaping. This turns out to be
+ if default_autoescape is None:
+ default_autoescape = environment.autoescape
+ self._default_autoescape = default_autoescape
+
+ @internalcode
+ @evalcontextfunction
+ def __call__(self, *args, **kwargs):
+ # This requires a bit of explanation. In the past we used to
+ # decide largely based on compile-time information if a macro is
+ # safe or unsafe. While there was a volatile mode it was largely
+ # unused for deciding on escaping. This turns out to be
# problematic for macros because whether a macro is safe depends not
# on the escape mode when it was defined, but rather when it was used.
- #
- # Because however we export macros from the module system and
- # there are historic callers that do not pass an eval context (and
- # will continue to not pass one), we need to perform an instance
- # check here.
- #
- # This is considered safe because an eval context is not a valid
+ #
+ # Because however we export macros from the module system and
+ # there are historic callers that do not pass an eval context (and
+ # will continue to not pass one), we need to perform an instance
+ # check here.
+ #
+ # This is considered safe because an eval context is not a valid
# argument to callables otherwise anyway. Worst case here is
- # that if no eval context is passed we fall back to the compile
- # time autoescape flag.
- if args and isinstance(args[0], EvalContext):
- autoescape = args[0].autoescape
- args = args[1:]
- else:
- autoescape = self._default_autoescape
-
- # try to consume the positional arguments
+ # that if no eval context is passed we fall back to the compile
+ # time autoescape flag.
+ if args and isinstance(args[0], EvalContext):
+ autoescape = args[0].autoescape
+ args = args[1:]
+ else:
+ autoescape = self._default_autoescape
+
+ # try to consume the positional arguments
arguments = list(args[: self._argument_count])
- off = len(arguments)
-
- # For information why this is necessary refer to the handling
- # of caller in the `macro_body` handler in the compiler.
- found_caller = False
-
- # if the number of arguments consumed is not the number of
- # arguments expected we start filling in keyword arguments
- # and defaults.
- if off != self._argument_count:
+ off = len(arguments)
+
+ # For information why this is necessary refer to the handling
+ # of caller in the `macro_body` handler in the compiler.
+ found_caller = False
+
+ # if the number of arguments consumed is not the number of
+ # arguments expected we start filling in keyword arguments
+ # and defaults.
+ if off != self._argument_count:
for name in self.arguments[len(arguments) :]:
- try:
- value = kwargs.pop(name)
- except KeyError:
- value = missing
+ try:
+ value = kwargs.pop(name)
+ except KeyError:
+ value = missing
if name == "caller":
- found_caller = True
- arguments.append(value)
- else:
- found_caller = self.explicit_caller
-
- # it's important that the order of these arguments does not change
- # if not also changed in the compiler's `function_scoping` method.
- # the order is caller, keyword arguments, positional arguments!
- if self.caller and not found_caller:
+ found_caller = True
+ arguments.append(value)
+ else:
+ found_caller = self.explicit_caller
+
+ # it's important that the order of these arguments does not change
+ # if not also changed in the compiler's `function_scoping` method.
+ # the order is caller, keyword arguments, positional arguments!
+ if self.caller and not found_caller:
caller = kwargs.pop("caller", None)
- if caller is None:
+ if caller is None:
caller = self._environment.undefined("No caller defined", name="caller")
- arguments.append(caller)
-
- if self.catch_kwargs:
- arguments.append(kwargs)
- elif kwargs:
+ arguments.append(caller)
+
+ if self.catch_kwargs:
+ arguments.append(kwargs)
+ elif kwargs:
if "caller" in kwargs:
raise TypeError(
"macro %r was invoked with two values for "
@@ -664,46 +664,46 @@ class Macro(object):
"macro %r takes no keyword argument %r"
% (self.name, next(iter(kwargs)))
)
- if self.catch_varargs:
+ if self.catch_varargs:
arguments.append(args[self._argument_count :])
- elif len(args) > self._argument_count:
+ elif len(args) > self._argument_count:
raise TypeError(
"macro %r takes not more than %d argument(s)"
% (self.name, len(self.arguments))
)
-
- return self._invoke(arguments, autoescape)
-
- def _invoke(self, arguments, autoescape):
- """This method is being swapped out by the async implementation."""
- rv = self._func(*arguments)
- if autoescape:
- rv = Markup(rv)
- return rv
-
- def __repr__(self):
+
+ return self._invoke(arguments, autoescape)
+
+ def _invoke(self, arguments, autoescape):
+ """This method is being swapped out by the async implementation."""
+ rv = self._func(*arguments)
+ if autoescape:
+ rv = Markup(rv)
+ return rv
+
+ def __repr__(self):
return "<%s %s>" % (
- self.__class__.__name__,
+ self.__class__.__name__,
self.name is None and "anonymous" or repr(self.name),
- )
-
-
-@implements_to_string
-class Undefined(object):
- """The default undefined type. This undefined type can be printed and
+ )
+
+
+@implements_to_string
+class Undefined(object):
+ """The default undefined type. This undefined type can be printed and
iterated over, but every other access will raise an :exc:`UndefinedError`:
-
- >>> foo = Undefined(name='foo')
- >>> str(foo)
- ''
- >>> not foo
- True
- >>> foo + 42
- Traceback (most recent call last):
- ...
- jinja2.exceptions.UndefinedError: 'foo' is undefined
- """
-
+
+ >>> foo = Undefined(name='foo')
+ >>> str(foo)
+ ''
+ >>> not foo
+ True
+ >>> foo + 42
+ Traceback (most recent call last):
+ ...
+ jinja2.exceptions.UndefinedError: 'foo' is undefined
+ """
+
__slots__ = (
"_undefined_hint",
"_undefined_obj",
@@ -711,12 +711,12 @@ class Undefined(object):
"_undefined_exception",
)
- def __init__(self, hint=None, obj=missing, name=None, exc=UndefinedError):
- self._undefined_hint = hint
- self._undefined_obj = obj
- self._undefined_name = name
- self._undefined_exception = exc
-
+ def __init__(self, hint=None, obj=missing, name=None, exc=UndefinedError):
+ self._undefined_hint = hint
+ self._undefined_obj = obj
+ self._undefined_name = name
+ self._undefined_exception = exc
+
@property
def _undefined_message(self):
"""Build a message about the undefined value based on how it was
@@ -739,19 +739,19 @@ class Undefined(object):
self._undefined_name,
)
- @internalcode
- def _fail_with_undefined_error(self, *args, **kwargs):
+ @internalcode
+ def _fail_with_undefined_error(self, *args, **kwargs):
"""Raise an :exc:`UndefinedError` when operations are performed
on the undefined value.
- """
+ """
raise self._undefined_exception(self._undefined_message)
-
- @internalcode
- def __getattr__(self, name):
+
+ @internalcode
+ def __getattr__(self, name):
if name[:2] == "__":
- raise AttributeError(name)
- return self._fail_with_undefined_error()
-
+ raise AttributeError(name)
+ return self._fail_with_undefined_error()
+
__add__ = (
__radd__
) = (
@@ -797,121 +797,121 @@ class Undefined(object):
) = (
__complex__
) = __pow__ = __rpow__ = __sub__ = __rsub__ = _fail_with_undefined_error
-
- def __eq__(self, other):
- return type(self) is type(other)
-
- def __ne__(self, other):
- return not self.__eq__(other)
-
- def __hash__(self):
- return id(type(self))
-
- def __str__(self):
+
+ def __eq__(self, other):
+ return type(self) is type(other)
+
+ def __ne__(self, other):
+ return not self.__eq__(other)
+
+ def __hash__(self):
+ return id(type(self))
+
+ def __str__(self):
return u""
-
- def __len__(self):
- return 0
-
- def __iter__(self):
- if 0:
- yield None
-
- def __nonzero__(self):
- return False
-
- __bool__ = __nonzero__
-
- def __repr__(self):
+
+ def __len__(self):
+ return 0
+
+ def __iter__(self):
+ if 0:
+ yield None
+
+ def __nonzero__(self):
+ return False
+
+ __bool__ = __nonzero__
+
+ def __repr__(self):
return "Undefined"
-
-
-def make_logging_undefined(logger=None, base=None):
- """Given a logger object this returns a new undefined class that will
- log certain failures. It will log iterations and printing. If no
- logger is given a default logger is created.
-
- Example::
-
- logger = logging.getLogger(__name__)
- LoggingUndefined = make_logging_undefined(
- logger=logger,
- base=Undefined
- )
-
- .. versionadded:: 2.8
-
- :param logger: the logger to use. If not provided, a default logger
- is created.
- :param base: the base class to add logging functionality to. This
- defaults to :class:`Undefined`.
- """
- if logger is None:
- import logging
-
- logger = logging.getLogger(__name__)
- logger.addHandler(logging.StreamHandler(sys.stderr))
- if base is None:
- base = Undefined
-
- def _log_message(undef):
- if undef._undefined_hint is None:
- if undef._undefined_obj is missing:
+
+
+def make_logging_undefined(logger=None, base=None):
+ """Given a logger object this returns a new undefined class that will
+ log certain failures. It will log iterations and printing. If no
+ logger is given a default logger is created.
+
+ Example::
+
+ logger = logging.getLogger(__name__)
+ LoggingUndefined = make_logging_undefined(
+ logger=logger,
+ base=Undefined
+ )
+
+ .. versionadded:: 2.8
+
+ :param logger: the logger to use. If not provided, a default logger
+ is created.
+ :param base: the base class to add logging functionality to. This
+ defaults to :class:`Undefined`.
+ """
+ if logger is None:
+ import logging
+
+ logger = logging.getLogger(__name__)
+ logger.addHandler(logging.StreamHandler(sys.stderr))
+ if base is None:
+ base = Undefined
+
+ def _log_message(undef):
+ if undef._undefined_hint is None:
+ if undef._undefined_obj is missing:
hint = "%s is undefined" % undef._undefined_name
- elif not isinstance(undef._undefined_name, string_types):
+ elif not isinstance(undef._undefined_name, string_types):
hint = "%s has no element %s" % (
- object_type_repr(undef._undefined_obj),
+ object_type_repr(undef._undefined_obj),
undef._undefined_name,
)
- else:
+ else:
hint = "%s has no attribute %s" % (
- object_type_repr(undef._undefined_obj),
+ object_type_repr(undef._undefined_obj),
undef._undefined_name,
)
- else:
- hint = undef._undefined_hint
+ else:
+ hint = undef._undefined_hint
logger.warning("Template variable warning: %s", hint)
-
- class LoggingUndefined(base):
- def _fail_with_undefined_error(self, *args, **kwargs):
- try:
- return base._fail_with_undefined_error(self, *args, **kwargs)
- except self._undefined_exception as e:
+
+ class LoggingUndefined(base):
+ def _fail_with_undefined_error(self, *args, **kwargs):
+ try:
+ return base._fail_with_undefined_error(self, *args, **kwargs)
+ except self._undefined_exception as e:
logger.error("Template variable error: %s", str(e))
- raise e
-
- def __str__(self):
- rv = base.__str__(self)
- _log_message(self)
- return rv
-
- def __iter__(self):
- rv = base.__iter__(self)
- _log_message(self)
- return rv
-
- if PY2:
-
- def __nonzero__(self):
- rv = base.__nonzero__(self)
- _log_message(self)
- return rv
-
- def __unicode__(self):
- rv = base.__unicode__(self)
- _log_message(self)
- return rv
-
- else:
-
- def __bool__(self):
- rv = base.__bool__(self)
- _log_message(self)
- return rv
-
- return LoggingUndefined
-
-
+ raise e
+
+ def __str__(self):
+ rv = base.__str__(self)
+ _log_message(self)
+ return rv
+
+ def __iter__(self):
+ rv = base.__iter__(self)
+ _log_message(self)
+ return rv
+
+ if PY2:
+
+ def __nonzero__(self):
+ rv = base.__nonzero__(self)
+ _log_message(self)
+ return rv
+
+ def __unicode__(self):
+ rv = base.__unicode__(self)
+ _log_message(self)
+ return rv
+
+ else:
+
+ def __bool__(self):
+ rv = base.__bool__(self)
+ _log_message(self)
+ return rv
+
+ return LoggingUndefined
+
+
# No @implements_to_string decorator here because __str__
# is not overwritten from Undefined in this class.
# This would cause a recursion error in Python 2.
@@ -942,56 +942,56 @@ class ChainableUndefined(Undefined):
__getitem__ = __getattr__
-@implements_to_string
-class DebugUndefined(Undefined):
- """An undefined that returns the debug info when printed.
-
- >>> foo = DebugUndefined(name='foo')
- >>> str(foo)
- '{{ foo }}'
- >>> not foo
- True
- >>> foo + 42
- Traceback (most recent call last):
- ...
- jinja2.exceptions.UndefinedError: 'foo' is undefined
- """
-
- __slots__ = ()
-
- def __str__(self):
- if self._undefined_hint is None:
- if self._undefined_obj is missing:
+@implements_to_string
+class DebugUndefined(Undefined):
+ """An undefined that returns the debug info when printed.
+
+ >>> foo = DebugUndefined(name='foo')
+ >>> str(foo)
+ '{{ foo }}'
+ >>> not foo
+ True
+ >>> foo + 42
+ Traceback (most recent call last):
+ ...
+ jinja2.exceptions.UndefinedError: 'foo' is undefined
+ """
+
+ __slots__ = ()
+
+ def __str__(self):
+ if self._undefined_hint is None:
+ if self._undefined_obj is missing:
return u"{{ %s }}" % self._undefined_name
return "{{ no such element: %s[%r] }}" % (
- object_type_repr(self._undefined_obj),
+ object_type_repr(self._undefined_obj),
self._undefined_name,
- )
+ )
return u"{{ undefined value printed: %s }}" % self._undefined_hint
-
-
-@implements_to_string
-class StrictUndefined(Undefined):
- """An undefined that barks on print and iteration as well as boolean
- tests and all kinds of comparisons. In other words: you can do nothing
- with it except checking if it's defined using the `defined` test.
-
- >>> foo = StrictUndefined(name='foo')
- >>> str(foo)
- Traceback (most recent call last):
- ...
- jinja2.exceptions.UndefinedError: 'foo' is undefined
- >>> not foo
- Traceback (most recent call last):
- ...
- jinja2.exceptions.UndefinedError: 'foo' is undefined
- >>> foo + 42
- Traceback (most recent call last):
- ...
- jinja2.exceptions.UndefinedError: 'foo' is undefined
- """
-
- __slots__ = ()
+
+
+@implements_to_string
+class StrictUndefined(Undefined):
+ """An undefined that barks on print and iteration as well as boolean
+ tests and all kinds of comparisons. In other words: you can do nothing
+ with it except checking if it's defined using the `defined` test.
+
+ >>> foo = StrictUndefined(name='foo')
+ >>> str(foo)
+ Traceback (most recent call last):
+ ...
+ jinja2.exceptions.UndefinedError: 'foo' is undefined
+ >>> not foo
+ Traceback (most recent call last):
+ ...
+ jinja2.exceptions.UndefinedError: 'foo' is undefined
+ >>> foo + 42
+ Traceback (most recent call last):
+ ...
+ jinja2.exceptions.UndefinedError: 'foo' is undefined
+ """
+
+ __slots__ = ()
__iter__ = (
__str__
) = (
@@ -999,10 +999,10 @@ class StrictUndefined(Undefined):
) = (
__nonzero__
) = __eq__ = __ne__ = __bool__ = __hash__ = Undefined._fail_with_undefined_error
-
-
-# remove remaining slots attributes, after the metaclass did the magic they
-# are unneeded and irritating as they contain wrong data for the subclasses.
+
+
+# remove remaining slots attributes, after the metaclass did the magic they
+# are unneeded and irritating as they contain wrong data for the subclasses.
del (
Undefined.__slots__,
ChainableUndefined.__slots__,
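
The Undefined variants restored above differ mainly in how they behave when printed or tested, as their doctests describe: the default renders as an empty string, DebugUndefined echoes the reference, and StrictUndefined raises immediately. A minimal illustrative sketch, not part of this change, comparing them via the Environment's undefined hook:

    from jinja2 import DebugUndefined
    from jinja2 import Environment
    from jinja2 import StrictUndefined
    from jinja2.exceptions import UndefinedError

    source = "value: {{ missing_var }}"

    # Default Undefined silently renders as an empty string.
    print(Environment().from_string(source).render())  # "value: "

    # DebugUndefined echoes the variable reference back into the output.
    print(Environment(undefined=DebugUndefined).from_string(source).render())
    # "value: {{ missing_var }}"

    # StrictUndefined raises as soon as the value is printed.
    try:
        Environment(undefined=StrictUndefined).from_string(source).render()
    except UndefinedError as exc:
        print(exc)  # 'missing_var' is undefined
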
diff --git a/contrib/python/Jinja2/py2/jinja2/sandbox.py b/contrib/python/Jinja2/py2/jinja2/sandbox.py
index cfd7993aee..8434d7702f 100644
--- a/contrib/python/Jinja2/py2/jinja2/sandbox.py
+++ b/contrib/python/Jinja2/py2/jinja2/sandbox.py
@@ -1,28 +1,28 @@
-# -*- coding: utf-8 -*-
+# -*- coding: utf-8 -*-
"""A sandbox layer that ensures unsafe operations cannot be performed.
Useful when the template itself comes from an untrusted source.
-"""
+"""
import operator
-import types
+import types
import warnings
from collections import deque
from string import Formatter
-
-from markupsafe import EscapeFormatter
+
+from markupsafe import EscapeFormatter
from markupsafe import Markup
-
+
from ._compat import abc
from ._compat import PY2
from ._compat import range_type
from ._compat import string_types
from .environment import Environment
from .exceptions import SecurityError
-
-#: maximum number of items a range may produce
-MAX_RANGE = 100000
-
-#: attributes of function objects that are considered unsafe.
-if PY2:
+
+#: maximum number of items a range may produce
+MAX_RANGE = 100000
+
+#: attributes of function objects that are considered unsafe.
+if PY2:
UNSAFE_FUNCTION_ATTRIBUTES = {
"func_closure",
"func_code",
@@ -30,56 +30,56 @@ if PY2:
"func_defaults",
"func_globals",
}
-else:
- # On Python 3 the special attributes on functions are gone,
- # but they remain on methods and generators for whatever reason.
- UNSAFE_FUNCTION_ATTRIBUTES = set()
-
-#: unsafe method attributes. function attributes are unsafe for methods too
+else:
+ # On Python 3 the special attributes on functions are gone,
+ # but they remain on methods and generators for whatever reason.
+ UNSAFE_FUNCTION_ATTRIBUTES = set()
+
+#: unsafe method attributes. function attributes are unsafe for methods too
UNSAFE_METHOD_ATTRIBUTES = {"im_class", "im_func", "im_self"}
-
+
#: unsafe generator attributes.
UNSAFE_GENERATOR_ATTRIBUTES = {"gi_frame", "gi_code"}
-
-#: unsafe attributes on coroutines
+
+#: unsafe attributes on coroutines
UNSAFE_COROUTINE_ATTRIBUTES = {"cr_frame", "cr_code"}
-
-#: unsafe attributes on async generators
+
+#: unsafe attributes on async generators
UNSAFE_ASYNC_GENERATOR_ATTRIBUTES = {"ag_code", "ag_frame"}
-
-# make sure we don't warn in python 2.6 about stuff we don't care about
+
+# make sure we don't warn in python 2.6 about stuff we don't care about
warnings.filterwarnings(
"ignore", "the sets module", DeprecationWarning, module=__name__
)
-
-_mutable_set_types = (set,)
-_mutable_mapping_types = (dict,)
-_mutable_sequence_types = (list,)
-
-# on python 2.x we can register the user collection types
-try:
- from UserDict import UserDict, DictMixin
- from UserList import UserList
-
- _mutable_mapping_types += (UserDict, DictMixin)
- _mutable_set_types += (UserList,)
-except ImportError:
- pass
-
-# if sets is still available, register the mutable set from there as well
-try:
- from sets import Set
-
- _mutable_set_types += (Set,)
-except ImportError:
- pass
-
-#: register Python 2.6 abstract base classes
+
+_mutable_set_types = (set,)
+_mutable_mapping_types = (dict,)
+_mutable_sequence_types = (list,)
+
+# on python 2.x we can register the user collection types
+try:
+ from UserDict import UserDict, DictMixin
+ from UserList import UserList
+
+ _mutable_mapping_types += (UserDict, DictMixin)
+ _mutable_set_types += (UserList,)
+except ImportError:
+ pass
+
+# if sets is still available, register the mutable set from there as well
+try:
+ from sets import Set
+
+ _mutable_set_types += (Set,)
+except ImportError:
+ pass
+
+#: register Python 2.6 abstract base classes
_mutable_set_types += (abc.MutableSet,)
_mutable_mapping_types += (abc.MutableMapping,)
_mutable_sequence_types += (abc.MutableSequence,)
-
-_mutable_spec = (
+
+_mutable_spec = (
(
_mutable_set_types,
frozenset(
@@ -119,160 +119,160 @@ _mutable_spec = (
]
),
),
-)
-
-
+)
+
+
class _MagicFormatMapping(abc.Mapping):
- """This class implements a dummy wrapper to fix a bug in the Python
- standard library for string formatting.
-
- See https://bugs.python.org/issue13598 for information about why
- this is necessary.
- """
-
- def __init__(self, args, kwargs):
- self._args = args
- self._kwargs = kwargs
- self._last_index = 0
-
- def __getitem__(self, key):
+ """This class implements a dummy wrapper to fix a bug in the Python
+ standard library for string formatting.
+
+ See https://bugs.python.org/issue13598 for information about why
+ this is necessary.
+ """
+
+ def __init__(self, args, kwargs):
+ self._args = args
+ self._kwargs = kwargs
+ self._last_index = 0
+
+ def __getitem__(self, key):
if key == "":
- idx = self._last_index
- self._last_index += 1
- try:
- return self._args[idx]
- except LookupError:
- pass
- key = str(idx)
- return self._kwargs[key]
-
- def __iter__(self):
- return iter(self._kwargs)
-
- def __len__(self):
- return len(self._kwargs)
-
-
-def inspect_format_method(callable):
+ idx = self._last_index
+ self._last_index += 1
+ try:
+ return self._args[idx]
+ except LookupError:
+ pass
+ key = str(idx)
+ return self._kwargs[key]
+
+ def __iter__(self):
+ return iter(self._kwargs)
+
+ def __len__(self):
+ return len(self._kwargs)
+
+
+def inspect_format_method(callable):
if not isinstance(
callable, (types.MethodType, types.BuiltinMethodType)
) or callable.__name__ not in ("format", "format_map"):
- return None
- obj = callable.__self__
- if isinstance(obj, string_types):
- return obj
-
-
-def safe_range(*args):
- """A range that can't generate ranges with a length of more than
- MAX_RANGE items.
- """
+ return None
+ obj = callable.__self__
+ if isinstance(obj, string_types):
+ return obj
+
+
+def safe_range(*args):
+ """A range that can't generate ranges with a length of more than
+ MAX_RANGE items.
+ """
rng = range_type(*args)
- if len(rng) > MAX_RANGE:
+ if len(rng) > MAX_RANGE:
raise OverflowError(
"Range too big. The sandbox blocks ranges larger than"
" MAX_RANGE (%d)." % MAX_RANGE
)
- return rng
-
-
-def unsafe(f):
- """Marks a function or method as unsafe.
-
- ::
-
- @unsafe
- def delete(self):
- pass
- """
- f.unsafe_callable = True
- return f
-
-
-def is_internal_attribute(obj, attr):
- """Test if the attribute given is an internal python attribute. For
- example this function returns `True` for the `func_code` attribute of
- python objects. This is useful if the environment method
- :meth:`~SandboxedEnvironment.is_safe_attribute` is overridden.
-
- >>> from jinja2.sandbox import is_internal_attribute
- >>> is_internal_attribute(str, "mro")
- True
- >>> is_internal_attribute(str, "upper")
- False
- """
- if isinstance(obj, types.FunctionType):
- if attr in UNSAFE_FUNCTION_ATTRIBUTES:
- return True
- elif isinstance(obj, types.MethodType):
+ return rng
+
+
+def unsafe(f):
+ """Marks a function or method as unsafe.
+
+ ::
+
+ @unsafe
+ def delete(self):
+ pass
+ """
+ f.unsafe_callable = True
+ return f
+
+
+def is_internal_attribute(obj, attr):
+ """Test if the attribute given is an internal python attribute. For
+ example this function returns `True` for the `func_code` attribute of
+ python objects. This is useful if the environment method
+ :meth:`~SandboxedEnvironment.is_safe_attribute` is overridden.
+
+ >>> from jinja2.sandbox import is_internal_attribute
+ >>> is_internal_attribute(str, "mro")
+ True
+ >>> is_internal_attribute(str, "upper")
+ False
+ """
+ if isinstance(obj, types.FunctionType):
+ if attr in UNSAFE_FUNCTION_ATTRIBUTES:
+ return True
+ elif isinstance(obj, types.MethodType):
if attr in UNSAFE_FUNCTION_ATTRIBUTES or attr in UNSAFE_METHOD_ATTRIBUTES:
- return True
- elif isinstance(obj, type):
+ return True
+ elif isinstance(obj, type):
if attr == "mro":
- return True
- elif isinstance(obj, (types.CodeType, types.TracebackType, types.FrameType)):
- return True
- elif isinstance(obj, types.GeneratorType):
- if attr in UNSAFE_GENERATOR_ATTRIBUTES:
- return True
+ return True
+ elif isinstance(obj, (types.CodeType, types.TracebackType, types.FrameType)):
+ return True
+ elif isinstance(obj, types.GeneratorType):
+ if attr in UNSAFE_GENERATOR_ATTRIBUTES:
+ return True
elif hasattr(types, "CoroutineType") and isinstance(obj, types.CoroutineType):
- if attr in UNSAFE_COROUTINE_ATTRIBUTES:
- return True
+ if attr in UNSAFE_COROUTINE_ATTRIBUTES:
+ return True
elif hasattr(types, "AsyncGeneratorType") and isinstance(
obj, types.AsyncGeneratorType
):
- if attr in UNSAFE_ASYNC_GENERATOR_ATTRIBUTES:
- return True
+ if attr in UNSAFE_ASYNC_GENERATOR_ATTRIBUTES:
+ return True
return attr.startswith("__")
-
-
-def modifies_known_mutable(obj, attr):
- """This function checks if an attribute on a builtin mutable object
- (list, dict, set or deque) would modify it if called. It also supports
- the "user"-versions of the objects (`sets.Set`, `UserDict.*` etc.) and
- with Python 2.6 onwards the abstract base classes `MutableSet`,
- `MutableMapping`, and `MutableSequence`.
-
- >>> modifies_known_mutable({}, "clear")
- True
- >>> modifies_known_mutable({}, "keys")
- False
- >>> modifies_known_mutable([], "append")
- True
- >>> modifies_known_mutable([], "index")
- False
-
- If called with an unsupported object (such as unicode) `False` is
- returned.
-
- >>> modifies_known_mutable("foo", "upper")
- False
- """
- for typespec, unsafe in _mutable_spec:
- if isinstance(obj, typespec):
- return attr in unsafe
- return False
-
-
-class SandboxedEnvironment(Environment):
- """The sandboxed environment. It works like the regular environment but
- tells the compiler to generate sandboxed code. Additionally subclasses of
- this environment may override the methods that tell the runtime what
- attributes or functions are safe to access.
-
- If the template tries to access insecure code a :exc:`SecurityError` is
- raised. However also other exceptions may occur during the rendering so
- the caller has to ensure that all exceptions are caught.
- """
-
- sandboxed = True
-
- #: default callback table for the binary operators. A copy of this is
- #: available on each instance of a sandboxed environment as
- #: :attr:`binop_table`
- default_binop_table = {
+
+
+def modifies_known_mutable(obj, attr):
+ """This function checks if an attribute on a builtin mutable object
+ (list, dict, set or deque) would modify it if called. It also supports
+ the "user"-versions of the objects (`sets.Set`, `UserDict.*` etc.) and
+ with Python 2.6 onwards the abstract base classes `MutableSet`,
+ `MutableMapping`, and `MutableSequence`.
+
+ >>> modifies_known_mutable({}, "clear")
+ True
+ >>> modifies_known_mutable({}, "keys")
+ False
+ >>> modifies_known_mutable([], "append")
+ True
+ >>> modifies_known_mutable([], "index")
+ False
+
+ If called with an unsupported object (such as unicode) `False` is
+ returned.
+
+ >>> modifies_known_mutable("foo", "upper")
+ False
+ """
+ for typespec, unsafe in _mutable_spec:
+ if isinstance(obj, typespec):
+ return attr in unsafe
+ return False
+
+
+class SandboxedEnvironment(Environment):
+ """The sandboxed environment. It works like the regular environment but
+ tells the compiler to generate sandboxed code. Additionally subclasses of
+ this environment may override the methods that tell the runtime what
+ attributes or functions are safe to access.
+
+ If the template tries to access insecure code a :exc:`SecurityError` is
+ raised. However also other exceptions may occur during the rendering so
+ the caller has to ensure that all exceptions are caught.
+ """
+
+ sandboxed = True
+
+ #: default callback table for the binary operators. A copy of this is
+ #: available on each instance of a sandboxed environment as
+ #: :attr:`binop_table`
+ default_binop_table = {
"+": operator.add,
"-": operator.sub,
"*": operator.mul,
@@ -280,144 +280,144 @@ class SandboxedEnvironment(Environment):
"//": operator.floordiv,
"**": operator.pow,
"%": operator.mod,
- }
-
- #: default callback table for the unary operators. A copy of this is
- #: available on each instance of a sandboxed environment as
- #: :attr:`unop_table`
+ }
+
+ #: default callback table for the unary operators. A copy of this is
+ #: available on each instance of a sandboxed environment as
+ #: :attr:`unop_table`
default_unop_table = {"+": operator.pos, "-": operator.neg}
-
- #: a set of binary operators that should be intercepted. Each operator
- #: that is added to this set (empty by default) is delegated to the
- #: :meth:`call_binop` method that will perform the operator. The default
- #: operator callback is specified by :attr:`binop_table`.
- #:
- #: The following binary operators are interceptable:
- #: ``//``, ``%``, ``+``, ``*``, ``-``, ``/``, and ``**``
- #:
- #: The default operation form the operator table corresponds to the
- #: builtin function. Intercepted calls are always slower than the native
- #: operator call, so make sure only to intercept the ones you are
- #: interested in.
- #:
- #: .. versionadded:: 2.6
- intercepted_binops = frozenset()
-
- #: a set of unary operators that should be intercepted. Each operator
- #: that is added to this set (empty by default) is delegated to the
- #: :meth:`call_unop` method that will perform the operator. The default
- #: operator callback is specified by :attr:`unop_table`.
- #:
- #: The following unary operators are interceptable: ``+``, ``-``
- #:
- #: The default operation form the operator table corresponds to the
- #: builtin function. Intercepted calls are always slower than the native
- #: operator call, so make sure only to intercept the ones you are
- #: interested in.
- #:
- #: .. versionadded:: 2.6
- intercepted_unops = frozenset()
-
- def intercept_unop(self, operator):
- """Called during template compilation with the name of a unary
- operator to check if it should be intercepted at runtime. If this
+
+ #: a set of binary operators that should be intercepted. Each operator
+ #: that is added to this set (empty by default) is delegated to the
+ #: :meth:`call_binop` method that will perform the operator. The default
+ #: operator callback is specified by :attr:`binop_table`.
+ #:
+ #: The following binary operators are interceptable:
+ #: ``//``, ``%``, ``+``, ``*``, ``-``, ``/``, and ``**``
+ #:
+ #: The default operation form the operator table corresponds to the
+ #: builtin function. Intercepted calls are always slower than the native
+ #: operator call, so make sure only to intercept the ones you are
+ #: interested in.
+ #:
+ #: .. versionadded:: 2.6
+ intercepted_binops = frozenset()
+
+ #: a set of unary operators that should be intercepted. Each operator
+ #: that is added to this set (empty by default) is delegated to the
+ #: :meth:`call_unop` method that will perform the operator. The default
+ #: operator callback is specified by :attr:`unop_table`.
+ #:
+ #: The following unary operators are interceptable: ``+``, ``-``
+ #:
+ #: The default operation form the operator table corresponds to the
+ #: builtin function. Intercepted calls are always slower than the native
+ #: operator call, so make sure only to intercept the ones you are
+ #: interested in.
+ #:
+ #: .. versionadded:: 2.6
+ intercepted_unops = frozenset()
+
+ def intercept_unop(self, operator):
+ """Called during template compilation with the name of a unary
+ operator to check if it should be intercepted at runtime. If this
method returns `True`, :meth:`call_unop` is executed for this unary
- operator. The default implementation of :meth:`call_unop` will use
- the :attr:`unop_table` dictionary to perform the operator with the
- same logic as the builtin one.
-
- The following unary operators are interceptable: ``+`` and ``-``
-
- Intercepted calls are always slower than the native operator call,
- so make sure only to intercept the ones you are interested in.
-
- .. versionadded:: 2.6
- """
- return False
-
- def __init__(self, *args, **kwargs):
- Environment.__init__(self, *args, **kwargs)
+ operator. The default implementation of :meth:`call_unop` will use
+ the :attr:`unop_table` dictionary to perform the operator with the
+ same logic as the builtin one.
+
+ The following unary operators are interceptable: ``+`` and ``-``
+
+ Intercepted calls are always slower than the native operator call,
+ so make sure only to intercept the ones you are interested in.
+
+ .. versionadded:: 2.6
+ """
+ return False
+
+ def __init__(self, *args, **kwargs):
+ Environment.__init__(self, *args, **kwargs)
self.globals["range"] = safe_range
- self.binop_table = self.default_binop_table.copy()
- self.unop_table = self.default_unop_table.copy()
-
- def is_safe_attribute(self, obj, attr, value):
- """The sandboxed environment will call this method to check if the
- attribute of an object is safe to access. Per default all attributes
- starting with an underscore are considered private as well as the
- special attributes of internal python objects as returned by the
- :func:`is_internal_attribute` function.
- """
+ self.binop_table = self.default_binop_table.copy()
+ self.unop_table = self.default_unop_table.copy()
+
+ def is_safe_attribute(self, obj, attr, value):
+ """The sandboxed environment will call this method to check if the
+ attribute of an object is safe to access. Per default all attributes
+ starting with an underscore are considered private as well as the
+ special attributes of internal python objects as returned by the
+ :func:`is_internal_attribute` function.
+ """
return not (attr.startswith("_") or is_internal_attribute(obj, attr))
-
- def is_safe_callable(self, obj):
- """Check if an object is safely callable. Per default a function is
- considered safe unless the `unsafe_callable` attribute exists and is
- True. Override this method to alter the behavior, but this won't
- affect the `unsafe` decorator from this module.
- """
+
+ def is_safe_callable(self, obj):
+ """Check if an object is safely callable. Per default a function is
+ considered safe unless the `unsafe_callable` attribute exists and is
+ True. Override this method to alter the behavior, but this won't
+ affect the `unsafe` decorator from this module.
+ """
return not (
getattr(obj, "unsafe_callable", False) or getattr(obj, "alters_data", False)
)
-
- def call_binop(self, context, operator, left, right):
- """For intercepted binary operator calls (:meth:`intercepted_binops`)
- this function is executed instead of the builtin operator. This can
- be used to fine tune the behavior of certain operators.
-
- .. versionadded:: 2.6
- """
- return self.binop_table[operator](left, right)
-
- def call_unop(self, context, operator, arg):
- """For intercepted unary operator calls (:meth:`intercepted_unops`)
- this function is executed instead of the builtin operator. This can
- be used to fine tune the behavior of certain operators.
-
- .. versionadded:: 2.6
- """
- return self.unop_table[operator](arg)
-
- def getitem(self, obj, argument):
- """Subscribe an object from sandboxed code."""
- try:
- return obj[argument]
- except (TypeError, LookupError):
- if isinstance(argument, string_types):
- try:
- attr = str(argument)
- except Exception:
- pass
- else:
- try:
- value = getattr(obj, attr)
- except AttributeError:
- pass
- else:
- if self.is_safe_attribute(obj, argument, value):
- return value
- return self.unsafe_undefined(obj, argument)
- return self.undefined(obj=obj, name=argument)
-
- def getattr(self, obj, attribute):
- """Subscribe an object from sandboxed code and prefer the
- attribute. The attribute passed *must* be a bytestring.
- """
- try:
- value = getattr(obj, attribute)
- except AttributeError:
- try:
- return obj[attribute]
- except (TypeError, LookupError):
- pass
- else:
- if self.is_safe_attribute(obj, attribute, value):
- return value
- return self.unsafe_undefined(obj, attribute)
- return self.undefined(obj=obj, name=attribute)
-
- def unsafe_undefined(self, obj, attribute):
- """Return an undefined object for unsafe attributes."""
+
+ def call_binop(self, context, operator, left, right):
+ """For intercepted binary operator calls (:meth:`intercepted_binops`)
+ this function is executed instead of the builtin operator. This can
+ be used to fine tune the behavior of certain operators.
+
+ .. versionadded:: 2.6
+ """
+ return self.binop_table[operator](left, right)
+
+ def call_unop(self, context, operator, arg):
+ """For intercepted unary operator calls (:meth:`intercepted_unops`)
+ this function is executed instead of the builtin operator. This can
+ be used to fine tune the behavior of certain operators.
+
+ .. versionadded:: 2.6
+ """
+ return self.unop_table[operator](arg)
+
+ def getitem(self, obj, argument):
+ """Subscribe an object from sandboxed code."""
+ try:
+ return obj[argument]
+ except (TypeError, LookupError):
+ if isinstance(argument, string_types):
+ try:
+ attr = str(argument)
+ except Exception:
+ pass
+ else:
+ try:
+ value = getattr(obj, attr)
+ except AttributeError:
+ pass
+ else:
+ if self.is_safe_attribute(obj, argument, value):
+ return value
+ return self.unsafe_undefined(obj, argument)
+ return self.undefined(obj=obj, name=argument)
+
+ def getattr(self, obj, attribute):
+ """Subscribe an object from sandboxed code and prefer the
+ attribute. The attribute passed *must* be a bytestring.
+ """
+ try:
+ value = getattr(obj, attribute)
+ except AttributeError:
+ try:
+ return obj[attribute]
+ except (TypeError, LookupError):
+ pass
+ else:
+ if self.is_safe_attribute(obj, attribute, value):
+ return value
+ return self.unsafe_undefined(obj, attribute)
+ return self.undefined(obj=obj, name=attribute)
+
+ def unsafe_undefined(self, obj, attribute):
+ """Return an undefined object for unsafe attributes."""
return self.undefined(
"access to attribute %r of %r "
"object is unsafe." % (attribute, obj.__class__.__name__),
@@ -425,15 +425,15 @@ class SandboxedEnvironment(Environment):
obj=obj,
exc=SecurityError,
)
-
+
def format_string(self, s, args, kwargs, format_func=None):
- """If a format call is detected, then this is routed through this
- method so that our safety sandbox can be used for it.
- """
- if isinstance(s, Markup):
- formatter = SandboxedEscapeFormatter(self, s.escape)
- else:
- formatter = SandboxedFormatter(self)
+ """If a format call is detected, then this is routed through this
+ method so that our safety sandbox can be used for it.
+ """
+ if isinstance(s, Markup):
+ formatter = SandboxedEscapeFormatter(self, s.escape)
+ else:
+ formatter = SandboxedFormatter(self)
if format_func is not None and format_func.__name__ == "format_map":
if len(args) != 1 or kwargs:
@@ -445,66 +445,66 @@ class SandboxedEnvironment(Environment):
kwargs = args[0]
args = None
- kwargs = _MagicFormatMapping(args, kwargs)
- rv = formatter.vformat(s, args, kwargs)
- return type(s)(rv)
-
+ kwargs = _MagicFormatMapping(args, kwargs)
+ rv = formatter.vformat(s, args, kwargs)
+ return type(s)(rv)
+
def call(__self, __context, __obj, *args, **kwargs): # noqa: B902
- """Call an object from sandboxed code."""
- fmt = inspect_format_method(__obj)
- if fmt is not None:
+ """Call an object from sandboxed code."""
+ fmt = inspect_format_method(__obj)
+ if fmt is not None:
return __self.format_string(fmt, args, kwargs, __obj)
-
- # the double prefixes are to avoid double keyword argument
- # errors when proxying the call.
- if not __self.is_safe_callable(__obj):
+
+ # the double prefixes are to avoid double keyword argument
+ # errors when proxying the call.
+ if not __self.is_safe_callable(__obj):
raise SecurityError("%r is not safely callable" % (__obj,))
- return __context.call(__obj, *args, **kwargs)
-
-
-class ImmutableSandboxedEnvironment(SandboxedEnvironment):
- """Works exactly like the regular `SandboxedEnvironment` but does not
- permit modifications on the builtin mutable objects `list`, `set`, and
- `dict` by using the :func:`modifies_known_mutable` function.
- """
-
- def is_safe_attribute(self, obj, attr, value):
- if not SandboxedEnvironment.is_safe_attribute(self, obj, attr, value):
- return False
- return not modifies_known_mutable(obj, attr)
-
-
+ return __context.call(__obj, *args, **kwargs)
+
+
+class ImmutableSandboxedEnvironment(SandboxedEnvironment):
+ """Works exactly like the regular `SandboxedEnvironment` but does not
+ permit modifications on the builtin mutable objects `list`, `set`, and
+ `dict` by using the :func:`modifies_known_mutable` function.
+ """
+
+ def is_safe_attribute(self, obj, attr, value):
+ if not SandboxedEnvironment.is_safe_attribute(self, obj, attr, value):
+ return False
+ return not modifies_known_mutable(obj, attr)
+
+
# This really is not a public API apparently.
-try:
- from _string import formatter_field_name_split
-except ImportError:
-
- def formatter_field_name_split(field_name):
- return field_name._formatter_field_name_split()
-
-
-class SandboxedFormatterMixin(object):
- def __init__(self, env):
- self._env = env
-
- def get_field(self, field_name, args, kwargs):
- first, rest = formatter_field_name_split(field_name)
- obj = self.get_value(first, args, kwargs)
- for is_attr, i in rest:
- if is_attr:
- obj = self._env.getattr(obj, i)
- else:
- obj = self._env.getitem(obj, i)
- return obj, first
-
-
-class SandboxedFormatter(SandboxedFormatterMixin, Formatter):
- def __init__(self, env):
- SandboxedFormatterMixin.__init__(self, env)
- Formatter.__init__(self)
-
-
-class SandboxedEscapeFormatter(SandboxedFormatterMixin, EscapeFormatter):
- def __init__(self, env, escape):
- SandboxedFormatterMixin.__init__(self, env)
- EscapeFormatter.__init__(self, escape)
+try:
+ from _string import formatter_field_name_split
+except ImportError:
+
+ def formatter_field_name_split(field_name):
+ return field_name._formatter_field_name_split()
+
+
+class SandboxedFormatterMixin(object):
+ def __init__(self, env):
+ self._env = env
+
+ def get_field(self, field_name, args, kwargs):
+ first, rest = formatter_field_name_split(field_name)
+ obj = self.get_value(first, args, kwargs)
+ for is_attr, i in rest:
+ if is_attr:
+ obj = self._env.getattr(obj, i)
+ else:
+ obj = self._env.getitem(obj, i)
+ return obj, first
+
+
+class SandboxedFormatter(SandboxedFormatterMixin, Formatter):
+ def __init__(self, env):
+ SandboxedFormatterMixin.__init__(self, env)
+ Formatter.__init__(self)
+
+
+class SandboxedEscapeFormatter(SandboxedFormatterMixin, EscapeFormatter):
+ def __init__(self, env, escape):
+ SandboxedFormatterMixin.__init__(self, env)
+ EscapeFormatter.__init__(self, escape)
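Illustrative sketch (not part of the diff): a minimal exercise of the sandbox module shown above, assuming the documented defaults (MAX_RANGE of 100000, SecurityError on unsafe access). The template strings are invented for the example.

    from jinja2.exceptions import SecurityError
    from jinja2.sandbox import ImmutableSandboxedEnvironment, SandboxedEnvironment

    env = SandboxedEnvironment()

    # Ordinary expressions behave exactly as in the regular Environment.
    print(env.from_string("{{ 2 + 2 }}").render())  # 4

    # safe_range is installed as the "range" global and refuses oversized ranges.
    try:
        env.from_string("{{ range(1000000) }}").render()
    except OverflowError as exc:
        print("blocked:", exc)

    # The immutable variant additionally rejects mutating methods on builtins
    # via modifies_known_mutable(); calling the unsafe result raises SecurityError.
    try:
        ImmutableSandboxedEnvironment().from_string("{{ [].append(42) }}").render()
    except SecurityError as exc:
        print("blocked:", exc)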
diff --git a/contrib/python/Jinja2/py2/jinja2/tests.py b/contrib/python/Jinja2/py2/jinja2/tests.py
index fabd4ce51b..b4722ad1bb 100644
--- a/contrib/python/Jinja2/py2/jinja2/tests.py
+++ b/contrib/python/Jinja2/py2/jinja2/tests.py
@@ -1,9 +1,9 @@
-# -*- coding: utf-8 -*-
+# -*- coding: utf-8 -*-
"""Built-in template tests used with the ``is`` operator."""
import decimal
-import operator
-import re
-
+import operator
+import re
+
from ._compat import abc
from ._compat import integer_types
from ._compat import string_types
@@ -11,52 +11,52 @@ from ._compat import text_type
from .runtime import Undefined
number_re = re.compile(r"^-?\d+(\.\d+)?$")
-regex_type = type(number_re)
-test_callable = callable
-
-
-def test_odd(value):
- """Return true if the variable is odd."""
- return value % 2 == 1
-
-
-def test_even(value):
- """Return true if the variable is even."""
- return value % 2 == 0
-
-
-def test_divisibleby(value, num):
- """Check if a variable is divisible by a number."""
- return value % num == 0
-
-
-def test_defined(value):
- """Return true if the variable is defined:
-
- .. sourcecode:: jinja
-
- {% if variable is defined %}
- value of variable: {{ variable }}
- {% else %}
- variable is not defined
- {% endif %}
-
- See the :func:`default` filter for a simple way to set undefined
- variables.
- """
- return not isinstance(value, Undefined)
-
-
-def test_undefined(value):
- """Like :func:`defined` but the other way round."""
- return isinstance(value, Undefined)
-
-
-def test_none(value):
- """Return true if the variable is none."""
- return value is None
-
-
+regex_type = type(number_re)
+test_callable = callable
+
+
+def test_odd(value):
+ """Return true if the variable is odd."""
+ return value % 2 == 1
+
+
+def test_even(value):
+ """Return true if the variable is even."""
+ return value % 2 == 0
+
+
+def test_divisibleby(value, num):
+ """Check if a variable is divisible by a number."""
+ return value % num == 0
+
+
+def test_defined(value):
+ """Return true if the variable is defined:
+
+ .. sourcecode:: jinja
+
+ {% if variable is defined %}
+ value of variable: {{ variable }}
+ {% else %}
+ variable is not defined
+ {% endif %}
+
+ See the :func:`default` filter for a simple way to set undefined
+ variables.
+ """
+ return not isinstance(value, Undefined)
+
+
+def test_undefined(value):
+ """Like :func:`defined` but the other way round."""
+ return isinstance(value, Undefined)
+
+
+def test_none(value):
+ """Return true if the variable is none."""
+ return value is None
+
+
def test_boolean(value):
"""Return true if the object is a boolean value.
@@ -99,82 +99,82 @@ def test_float(value):
return isinstance(value, float)
-def test_lower(value):
- """Return true if the variable is lowercased."""
- return text_type(value).islower()
-
-
-def test_upper(value):
- """Return true if the variable is uppercased."""
- return text_type(value).isupper()
-
-
-def test_string(value):
- """Return true if the object is a string."""
- return isinstance(value, string_types)
-
-
-def test_mapping(value):
- """Return true if the object is a mapping (dict etc.).
-
- .. versionadded:: 2.6
- """
+def test_lower(value):
+ """Return true if the variable is lowercased."""
+ return text_type(value).islower()
+
+
+def test_upper(value):
+ """Return true if the variable is uppercased."""
+ return text_type(value).isupper()
+
+
+def test_string(value):
+ """Return true if the object is a string."""
+ return isinstance(value, string_types)
+
+
+def test_mapping(value):
+ """Return true if the object is a mapping (dict etc.).
+
+ .. versionadded:: 2.6
+ """
return isinstance(value, abc.Mapping)
-
-
-def test_number(value):
- """Return true if the variable is a number."""
- return isinstance(value, integer_types + (float, complex, decimal.Decimal))
-
-
-def test_sequence(value):
- """Return true if the variable is a sequence. Sequences are variables
- that are iterable.
- """
- try:
- len(value)
- value.__getitem__
+
+
+def test_number(value):
+ """Return true if the variable is a number."""
+ return isinstance(value, integer_types + (float, complex, decimal.Decimal))
+
+
+def test_sequence(value):
+ """Return true if the variable is a sequence. Sequences are variables
+ that are iterable.
+ """
+ try:
+ len(value)
+ value.__getitem__
except Exception:
- return False
- return True
-
-
-def test_sameas(value, other):
- """Check if an object points to the same memory address than another
- object:
-
- .. sourcecode:: jinja
-
- {% if foo.attribute is sameas false %}
- the foo attribute really is the `False` singleton
- {% endif %}
- """
- return value is other
-
-
-def test_iterable(value):
- """Check if it's possible to iterate over an object."""
- try:
- iter(value)
- except TypeError:
- return False
- return True
-
-
-def test_escaped(value):
- """Check if the value is escaped."""
+ return False
+ return True
+
+
+def test_sameas(value, other):
+ """Check if an object points to the same memory address than another
+ object:
+
+ .. sourcecode:: jinja
+
+ {% if foo.attribute is sameas false %}
+ the foo attribute really is the `False` singleton
+ {% endif %}
+ """
+ return value is other
+
+
+def test_iterable(value):
+ """Check if it's possible to iterate over an object."""
+ try:
+ iter(value)
+ except TypeError:
+ return False
+ return True
+
+
+def test_escaped(value):
+ """Check if the value is escaped."""
return hasattr(value, "__html__")
-
-
-def test_in(value, seq):
- """Check if value is in seq.
-
- .. versionadded:: 2.10
- """
- return value in seq
-
-
-TESTS = {
+
+
+def test_in(value, seq):
+ """Check if value is in seq.
+
+ .. versionadded:: 2.10
+ """
+ return value in seq
+
+
+TESTS = {
"odd": test_odd,
"even": test_even,
"divisibleby": test_divisibleby,
@@ -212,4 +212,4 @@ TESTS = {
"lessthan": operator.lt,
"<=": operator.le,
"le": operator.le,
-}
+}
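Illustrative sketch (not part of the diff): the functions collected in the TESTS table above are reached through Jinja's `is` operator. The template string and variable name are assumptions for demonstration only.

    from jinja2 import Environment

    env = Environment()

    # "divisibleby", "defined" and "mapping" map to test_divisibleby,
    # test_defined and test_mapping from the TESTS table above.
    template = env.from_string(
        "{{ 9 is divisibleby 3 }} "
        "{{ unknown_var is defined }} "
        "{{ {'a': 1} is mapping }}"
    )
    print(template.render())  # True False True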
diff --git a/contrib/python/Jinja2/py2/jinja2/utils.py b/contrib/python/Jinja2/py2/jinja2/utils.py
index 6afca81055..a5273c9782 100644
--- a/contrib/python/Jinja2/py2/jinja2/utils.py
+++ b/contrib/python/Jinja2/py2/jinja2/utils.py
@@ -1,198 +1,198 @@
-# -*- coding: utf-8 -*-
+# -*- coding: utf-8 -*-
import json
import os
-import re
+import re
import warnings
-from collections import deque
+from collections import deque
from random import choice
from random import randrange
from string import ascii_letters as _letters
from string import digits as _digits
-from threading import Lock
-
+from threading import Lock
+
from markupsafe import escape
from markupsafe import Markup
-
+
from ._compat import abc
from ._compat import string_types
from ._compat import text_type
from ._compat import url_quote
-# special singleton representing missing values for the runtime
+# special singleton representing missing values for the runtime
missing = type("MissingType", (), {"__repr__": lambda x: "missing"})()
-
-# internal code
-internal_code = set()
-
+
+# internal code
+internal_code = set()
+
concat = u"".join
-
+
_slash_escape = "\\/" not in json.dumps("/")
-
-
-def contextfunction(f):
- """This decorator can be used to mark a function or method context callable.
- A context callable is passed the active :class:`Context` as first argument when
- called from the template. This is useful if a function wants to get access
- to the context or functions provided on the context object. For example
- a function that returns a sorted list of template variables the current
- template exports could look like this::
-
- @contextfunction
- def get_exported_names(context):
- return sorted(context.exported_vars)
- """
- f.contextfunction = True
- return f
-
-
-def evalcontextfunction(f):
- """This decorator can be used to mark a function or method as an eval
- context callable. This is similar to the :func:`contextfunction`
- but instead of passing the context, an evaluation context object is
- passed. For more information about the eval context, see
- :ref:`eval-context`.
-
- .. versionadded:: 2.4
- """
- f.evalcontextfunction = True
- return f
-
-
-def environmentfunction(f):
- """This decorator can be used to mark a function or method as environment
- callable. This decorator works exactly like the :func:`contextfunction`
- decorator just that the first argument is the active :class:`Environment`
- and not context.
- """
- f.environmentfunction = True
- return f
-
-
-def internalcode(f):
- """Marks the function as internally used"""
- internal_code.add(f.__code__)
- return f
-
-
-def is_undefined(obj):
- """Check if the object passed is undefined. This does nothing more than
- performing an instance check against :class:`Undefined` but looks nicer.
- This can be used for custom filters or tests that want to react to
- undefined variables. For example a custom default filter can look like
- this::
-
- def default(var, default=''):
- if is_undefined(var):
- return default
- return var
- """
+
+
+def contextfunction(f):
+ """This decorator can be used to mark a function or method context callable.
+ A context callable is passed the active :class:`Context` as first argument when
+ called from the template. This is useful if a function wants to get access
+ to the context or functions provided on the context object. For example
+ a function that returns a sorted list of template variables the current
+ template exports could look like this::
+
+ @contextfunction
+ def get_exported_names(context):
+ return sorted(context.exported_vars)
+ """
+ f.contextfunction = True
+ return f
+
+
+def evalcontextfunction(f):
+ """This decorator can be used to mark a function or method as an eval
+ context callable. This is similar to the :func:`contextfunction`
+ but instead of passing the context, an evaluation context object is
+ passed. For more information about the eval context, see
+ :ref:`eval-context`.
+
+ .. versionadded:: 2.4
+ """
+ f.evalcontextfunction = True
+ return f
+
+
+def environmentfunction(f):
+ """This decorator can be used to mark a function or method as environment
+ callable. This decorator works exactly like the :func:`contextfunction`
+ decorator just that the first argument is the active :class:`Environment`
+ and not context.
+ """
+ f.environmentfunction = True
+ return f
+
+
+def internalcode(f):
+ """Marks the function as internally used"""
+ internal_code.add(f.__code__)
+ return f
+
+
+def is_undefined(obj):
+ """Check if the object passed is undefined. This does nothing more than
+ performing an instance check against :class:`Undefined` but looks nicer.
+ This can be used for custom filters or tests that want to react to
+ undefined variables. For example a custom default filter can look like
+ this::
+
+ def default(var, default=''):
+ if is_undefined(var):
+ return default
+ return var
+ """
from .runtime import Undefined
- return isinstance(obj, Undefined)
-
-
-def consume(iterable):
- """Consumes an iterable without doing anything with it."""
+ return isinstance(obj, Undefined)
+
+
+def consume(iterable):
+ """Consumes an iterable without doing anything with it."""
for _ in iterable:
- pass
-
-
-def clear_caches():
+ pass
+
+
+def clear_caches():
"""Jinja keeps internal caches for environments and lexers. These are
used so that Jinja doesn't have to recreate environments and lexers all
- the time. Normally you don't have to care about that but if you are
- measuring memory consumption you may want to clean the caches.
- """
+ the time. Normally you don't have to care about that but if you are
+ measuring memory consumption you may want to clean the caches.
+ """
from .environment import _spontaneous_environments
from .lexer import _lexer_cache
- _spontaneous_environments.clear()
- _lexer_cache.clear()
-
-
-def import_string(import_name, silent=False):
- """Imports an object based on a string. This is useful if you want to
- use import paths as endpoints or something similar. An import path can
- be specified either in dotted notation (``xml.sax.saxutils.escape``)
- or with a colon as object delimiter (``xml.sax.saxutils:escape``).
-
- If the `silent` is True the return value will be `None` if the import
- fails.
-
- :return: imported object
- """
- try:
+ _spontaneous_environments.clear()
+ _lexer_cache.clear()
+
+
+def import_string(import_name, silent=False):
+ """Imports an object based on a string. This is useful if you want to
+ use import paths as endpoints or something similar. An import path can
+ be specified either in dotted notation (``xml.sax.saxutils.escape``)
+ or with a colon as object delimiter (``xml.sax.saxutils:escape``).
+
+ If the `silent` is True the return value will be `None` if the import
+ fails.
+
+ :return: imported object
+ """
+ try:
if ":" in import_name:
module, obj = import_name.split(":", 1)
elif "." in import_name:
module, _, obj = import_name.rpartition(".")
- else:
- return __import__(import_name)
- return getattr(__import__(module, None, None, [obj]), obj)
- except (ImportError, AttributeError):
- if not silent:
- raise
-
-
+ else:
+ return __import__(import_name)
+ return getattr(__import__(module, None, None, [obj]), obj)
+ except (ImportError, AttributeError):
+ if not silent:
+ raise
+
+
def open_if_exists(filename, mode="rb"):
- """Returns a file descriptor for the filename if that file exists,
+ """Returns a file descriptor for the filename if that file exists,
otherwise ``None``.
- """
+ """
if not os.path.isfile(filename):
return None
-
+
return open(filename, mode)
-
-
-def object_type_repr(obj):
- """Returns the name of the object's type. For some recognized
- singletons the name of the object is returned instead. (For
- example for `None` and `Ellipsis`).
- """
- if obj is None:
+
+
+def object_type_repr(obj):
+ """Returns the name of the object's type. For some recognized
+ singletons the name of the object is returned instead. (For
+ example for `None` and `Ellipsis`).
+ """
+ if obj is None:
return "None"
- elif obj is Ellipsis:
+ elif obj is Ellipsis:
return "Ellipsis"
cls = type(obj)
- # __builtin__ in 2.x, builtins in 3.x
+ # __builtin__ in 2.x, builtins in 3.x
if cls.__module__ in ("__builtin__", "builtins"):
name = cls.__name__
- else:
+ else:
name = cls.__module__ + "." + cls.__name__
-
+
return "%s object" % name
-
-
-def pformat(obj, verbose=False):
- """Prettyprint an object. Either use the `pretty` library or the
- builtin `pprint`.
- """
- try:
- from pretty import pretty
-
- return pretty(obj, verbose=verbose)
- except ImportError:
- from pprint import pformat
-
- return pformat(obj)
-
-
-def urlize(text, trim_url_limit=None, rel=None, target=None):
- """Converts any URLs in text into clickable links. Works on http://,
- https:// and www. links. Links can have trailing punctuation (periods,
- commas, close-parens) and leading punctuation (opening parens) and
- it'll still do the right thing.
-
- If trim_url_limit is not None, the URLs in link text will be limited
- to trim_url_limit characters.
-
- If nofollow is True, the URLs in link text will get a rel="nofollow"
- attribute.
-
- If target is not None, a target attribute will be added to the link.
- """
+
+
+def pformat(obj, verbose=False):
+ """Prettyprint an object. Either use the `pretty` library or the
+ builtin `pprint`.
+ """
+ try:
+ from pretty import pretty
+
+ return pretty(obj, verbose=verbose)
+ except ImportError:
+ from pprint import pformat
+
+ return pformat(obj)
+
+
+def urlize(text, trim_url_limit=None, rel=None, target=None):
+ """Converts any URLs in text into clickable links. Works on http://,
+ https:// and www. links. Links can have trailing punctuation (periods,
+ commas, close-parens) and leading punctuation (opening parens) and
+ it'll still do the right thing.
+
+ If trim_url_limit is not None, the URLs in link text will be limited
+ to trim_url_limit characters.
+
+ If nofollow is True, the URLs in link text will get a rel="nofollow"
+ attribute.
+
+ If target is not None, a target attribute will be added to the link.
+ """
trim_url = (
lambda x, limit=trim_url_limit: limit is not None
and (x[:limit] + (len(x) >= limit and "..." or ""))
@@ -201,12 +201,12 @@ def urlize(text, trim_url_limit=None, rel=None, target=None):
words = re.split(r"(\s+)", text_type(escape(text)))
rel_attr = rel and ' rel="%s"' % text_type(escape(rel)) or ""
target_attr = target and ' target="%s"' % escape(target) or ""
-
- for i, word in enumerate(words):
+
+ for i, word in enumerate(words):
head, middle, tail = "", word, ""
match = re.match(r"^([(<]|&lt;)+", middle)
- if match:
+ if match:
head = match.group()
middle = middle[match.end() :]
@@ -258,60 +258,60 @@ def urlize(text, trim_url_limit=None, rel=None, target=None):
words[i] = head + middle + tail
return u"".join(words)
-
-
-def generate_lorem_ipsum(n=5, html=True, min=20, max=100):
- """Generate some lorem ipsum for the template."""
+
+
+def generate_lorem_ipsum(n=5, html=True, min=20, max=100):
+ """Generate some lorem ipsum for the template."""
from .constants import LOREM_IPSUM_WORDS
- words = LOREM_IPSUM_WORDS.split()
- result = []
-
- for _ in range(n):
- next_capitalized = True
- last_comma = last_fullstop = 0
- word = None
- last = None
- p = []
-
- # each paragraph contains out of 20 to 100 words.
- for idx, _ in enumerate(range(randrange(min, max))):
- while True:
- word = choice(words)
- if word != last:
- last = word
- break
- if next_capitalized:
- word = word.capitalize()
- next_capitalized = False
- # add commas
- if idx - randrange(3, 8) > last_comma:
- last_comma = idx
- last_fullstop += 2
+ words = LOREM_IPSUM_WORDS.split()
+ result = []
+
+ for _ in range(n):
+ next_capitalized = True
+ last_comma = last_fullstop = 0
+ word = None
+ last = None
+ p = []
+
+ # each paragraph contains out of 20 to 100 words.
+ for idx, _ in enumerate(range(randrange(min, max))):
+ while True:
+ word = choice(words)
+ if word != last:
+ last = word
+ break
+ if next_capitalized:
+ word = word.capitalize()
+ next_capitalized = False
+ # add commas
+ if idx - randrange(3, 8) > last_comma:
+ last_comma = idx
+ last_fullstop += 2
word += ","
- # add end of sentences
- if idx - randrange(10, 20) > last_fullstop:
- last_comma = last_fullstop = idx
+ # add end of sentences
+ if idx - randrange(10, 20) > last_fullstop:
+ last_comma = last_fullstop = idx
word += "."
- next_capitalized = True
- p.append(word)
-
- # ensure that the paragraph ends with a dot.
+ next_capitalized = True
+ p.append(word)
+
+ # ensure that the paragraph ends with a dot.
p = u" ".join(p)
if p.endswith(","):
p = p[:-1] + "."
elif not p.endswith("."):
p += "."
- result.append(p)
-
- if not html:
+ result.append(p)
+
+ if not html:
return u"\n\n".join(result)
return Markup(u"\n".join(u"<p>%s</p>" % escape(x) for x in result))
-
-
+
+
def unicode_urlencode(obj, charset="utf-8", for_qs=False):
"""Quote a string for use in a URL using the given charset.
-
+
This function is misnamed, it is a wrapper around
:func:`urllib.parse.quote`.
@@ -319,12 +319,12 @@ def unicode_urlencode(obj, charset="utf-8", for_qs=False):
string then encoded to bytes using the given charset.
:param charset: Encode text to bytes using this charset.
:param for_qs: Quote "/" and use "+" for spaces.
- """
- if not isinstance(obj, string_types):
- obj = text_type(obj)
+ """
+ if not isinstance(obj, string_types):
+ obj = text_type(obj)
- if isinstance(obj, text_type):
- obj = obj.encode(charset)
+ if isinstance(obj, text_type):
+ obj = obj.encode(charset)
safe = b"" if for_qs else b"/"
rv = url_quote(obj, safe)
@@ -332,172 +332,172 @@ def unicode_urlencode(obj, charset="utf-8", for_qs=False):
if not isinstance(rv, text_type):
rv = rv.decode("utf-8")
- if for_qs:
+ if for_qs:
rv = rv.replace("%20", "+")
- return rv
-
-
-class LRUCache(object):
- """A simple LRU Cache implementation."""
-
- # this is fast for small capacities (something below 1000) but doesn't
- # scale. But as long as it's only used as storage for templates this
- # won't do any harm.
-
- def __init__(self, capacity):
- self.capacity = capacity
- self._mapping = {}
- self._queue = deque()
- self._postinit()
-
- def _postinit(self):
- # alias all queue methods for faster lookup
- self._popleft = self._queue.popleft
- self._pop = self._queue.pop
- self._remove = self._queue.remove
- self._wlock = Lock()
- self._append = self._queue.append
-
- def __getstate__(self):
- return {
+ return rv
+
+
+class LRUCache(object):
+ """A simple LRU Cache implementation."""
+
+ # this is fast for small capacities (something below 1000) but doesn't
+ # scale. But as long as it's only used as storage for templates this
+ # won't do any harm.
+
+ def __init__(self, capacity):
+ self.capacity = capacity
+ self._mapping = {}
+ self._queue = deque()
+ self._postinit()
+
+ def _postinit(self):
+ # alias all queue methods for faster lookup
+ self._popleft = self._queue.popleft
+ self._pop = self._queue.pop
+ self._remove = self._queue.remove
+ self._wlock = Lock()
+ self._append = self._queue.append
+
+ def __getstate__(self):
+ return {
"capacity": self.capacity,
"_mapping": self._mapping,
"_queue": self._queue,
- }
-
- def __setstate__(self, d):
- self.__dict__.update(d)
- self._postinit()
-
- def __getnewargs__(self):
- return (self.capacity,)
-
- def copy(self):
- """Return a shallow copy of the instance."""
- rv = self.__class__(self.capacity)
- rv._mapping.update(self._mapping)
+ }
+
+ def __setstate__(self, d):
+ self.__dict__.update(d)
+ self._postinit()
+
+ def __getnewargs__(self):
+ return (self.capacity,)
+
+ def copy(self):
+ """Return a shallow copy of the instance."""
+ rv = self.__class__(self.capacity)
+ rv._mapping.update(self._mapping)
rv._queue.extend(self._queue)
- return rv
-
- def get(self, key, default=None):
- """Return an item from the cache dict or `default`"""
- try:
- return self[key]
- except KeyError:
- return default
-
- def setdefault(self, key, default=None):
- """Set `default` if the key is not in the cache otherwise
- leave unchanged. Return the value of this key.
- """
- try:
+ return rv
+
+ def get(self, key, default=None):
+ """Return an item from the cache dict or `default`"""
+ try:
+ return self[key]
+ except KeyError:
+ return default
+
+ def setdefault(self, key, default=None):
+ """Set `default` if the key is not in the cache otherwise
+ leave unchanged. Return the value of this key.
+ """
+ try:
return self[key]
except KeyError:
self[key] = default
return default
-
- def clear(self):
- """Clear the cache."""
- self._wlock.acquire()
- try:
- self._mapping.clear()
- self._queue.clear()
- finally:
- self._wlock.release()
-
- def __contains__(self, key):
- """Check if a key exists in this cache."""
- return key in self._mapping
-
- def __len__(self):
- """Return the current size of the cache."""
- return len(self._mapping)
-
- def __repr__(self):
+
+ def clear(self):
+ """Clear the cache."""
+ self._wlock.acquire()
+ try:
+ self._mapping.clear()
+ self._queue.clear()
+ finally:
+ self._wlock.release()
+
+ def __contains__(self, key):
+ """Check if a key exists in this cache."""
+ return key in self._mapping
+
+ def __len__(self):
+ """Return the current size of the cache."""
+ return len(self._mapping)
+
+ def __repr__(self):
return "<%s %r>" % (self.__class__.__name__, self._mapping)
-
- def __getitem__(self, key):
- """Get an item from the cache. Moves the item up so that it has the
- highest priority then.
-
- Raise a `KeyError` if it does not exist.
- """
- self._wlock.acquire()
- try:
- rv = self._mapping[key]
- if self._queue[-1] != key:
- try:
- self._remove(key)
- except ValueError:
- # if something removed the key from the container
- # when we read, ignore the ValueError that we would
- # get otherwise.
- pass
- self._append(key)
- return rv
- finally:
- self._wlock.release()
-
- def __setitem__(self, key, value):
- """Sets the value for an item. Moves the item up so that it
- has the highest priority then.
- """
- self._wlock.acquire()
- try:
- if key in self._mapping:
- self._remove(key)
- elif len(self._mapping) == self.capacity:
- del self._mapping[self._popleft()]
- self._append(key)
- self._mapping[key] = value
- finally:
- self._wlock.release()
-
- def __delitem__(self, key):
- """Remove an item from the cache dict.
- Raise a `KeyError` if it does not exist.
- """
- self._wlock.acquire()
- try:
- del self._mapping[key]
- try:
- self._remove(key)
- except ValueError:
- pass
- finally:
- self._wlock.release()
-
- def items(self):
- """Return a list of items."""
- result = [(key, self._mapping[key]) for key in list(self._queue)]
- result.reverse()
- return result
-
- def iteritems(self):
- """Iterate over all items."""
+
+ def __getitem__(self, key):
+ """Get an item from the cache. Moves the item up so that it has the
+ highest priority then.
+
+ Raise a `KeyError` if it does not exist.
+ """
+ self._wlock.acquire()
+ try:
+ rv = self._mapping[key]
+ if self._queue[-1] != key:
+ try:
+ self._remove(key)
+ except ValueError:
+ # if something removed the key from the container
+ # when we read, ignore the ValueError that we would
+ # get otherwise.
+ pass
+ self._append(key)
+ return rv
+ finally:
+ self._wlock.release()
+
+ def __setitem__(self, key, value):
+ """Sets the value for an item. Moves the item up so that it
+ has the highest priority then.
+ """
+ self._wlock.acquire()
+ try:
+ if key in self._mapping:
+ self._remove(key)
+ elif len(self._mapping) == self.capacity:
+ del self._mapping[self._popleft()]
+ self._append(key)
+ self._mapping[key] = value
+ finally:
+ self._wlock.release()
+
+ def __delitem__(self, key):
+ """Remove an item from the cache dict.
+ Raise a `KeyError` if it does not exist.
+ """
+ self._wlock.acquire()
+ try:
+ del self._mapping[key]
+ try:
+ self._remove(key)
+ except ValueError:
+ pass
+ finally:
+ self._wlock.release()
+
+ def items(self):
+ """Return a list of items."""
+ result = [(key, self._mapping[key]) for key in list(self._queue)]
+ result.reverse()
+ return result
+
+ def iteritems(self):
+ """Iterate over all items."""
warnings.warn(
"'iteritems()' will be removed in version 3.0. Use"
" 'iter(cache.items())' instead.",
DeprecationWarning,
stacklevel=2,
)
- return iter(self.items())
-
- def values(self):
- """Return a list of all values."""
- return [x[1] for x in self.items()]
-
- def itervalue(self):
- """Iterate over all values."""
+ return iter(self.items())
+
+ def values(self):
+ """Return a list of all values."""
+ return [x[1] for x in self.items()]
+
+ def itervalue(self):
+ """Iterate over all values."""
warnings.warn(
"'itervalue()' will be removed in version 3.0. Use"
" 'iter(cache.values())' instead.",
DeprecationWarning,
stacklevel=2,
)
- return iter(self.values())
-
+ return iter(self.values())
+
def itervalues(self):
"""Iterate over all values."""
warnings.warn(
@@ -508,14 +508,14 @@ class LRUCache(object):
)
return iter(self.values())
- def keys(self):
- """Return a list of all keys ordered by most recent usage."""
- return list(self)
-
- def iterkeys(self):
- """Iterate over all keys in the cache dict, ordered by
- the most recent usage.
- """
+ def keys(self):
+ """Return a list of all keys ordered by most recent usage."""
+ return list(self)
+
+ def iterkeys(self):
+ """Iterate over all keys in the cache dict, ordered by
+ the most recent usage.
+ """
warnings.warn(
"'iterkeys()' will be removed in version 3.0. Use"
" 'iter(cache.keys())' instead.",
@@ -525,96 +525,96 @@ class LRUCache(object):
return iter(self)
def __iter__(self):
- return reversed(tuple(self._queue))
-
- def __reversed__(self):
+ return reversed(tuple(self._queue))
+
+ def __reversed__(self):
"""Iterate over the keys in the cache dict, oldest items
- coming first.
- """
- return iter(tuple(self._queue))
-
- __copy__ = copy
-
-
+ coming first.
+ """
+ return iter(tuple(self._queue))
+
+ __copy__ = copy
+
+
abc.MutableMapping.register(LRUCache)
-
-
+
+
def select_autoescape(
enabled_extensions=("html", "htm", "xml"),
disabled_extensions=(),
default_for_string=True,
default=False,
):
- """Intelligently sets the initial value of autoescaping based on the
- filename of the template. This is the recommended way to configure
- autoescaping if you do not want to write a custom function yourself.
-
- If you want to enable it for all templates created from strings or
- for all templates with `.html` and `.xml` extensions::
-
- from jinja2 import Environment, select_autoescape
- env = Environment(autoescape=select_autoescape(
- enabled_extensions=('html', 'xml'),
- default_for_string=True,
- ))
-
- Example configuration to turn it on at all times except if the template
- ends with `.txt`::
-
- from jinja2 import Environment, select_autoescape
- env = Environment(autoescape=select_autoescape(
- disabled_extensions=('txt',),
- default_for_string=True,
- default=True,
- ))
-
- The `enabled_extensions` is an iterable of all the extensions that
- autoescaping should be enabled for. Likewise `disabled_extensions` is
- a list of all templates it should be disabled for. If a template is
- loaded from a string then the default from `default_for_string` is used.
- If nothing matches then the initial value of autoescaping is set to the
- value of `default`.
-
- For security reasons this function operates case insensitive.
-
- .. versionadded:: 2.9
- """
+ """Intelligently sets the initial value of autoescaping based on the
+ filename of the template. This is the recommended way to configure
+ autoescaping if you do not want to write a custom function yourself.
+
+ If you want to enable it for all templates created from strings or
+ for all templates with `.html` and `.xml` extensions::
+
+ from jinja2 import Environment, select_autoescape
+ env = Environment(autoescape=select_autoescape(
+ enabled_extensions=('html', 'xml'),
+ default_for_string=True,
+ ))
+
+ Example configuration to turn it on at all times except if the template
+ ends with `.txt`::
+
+ from jinja2 import Environment, select_autoescape
+ env = Environment(autoescape=select_autoescape(
+ disabled_extensions=('txt',),
+ default_for_string=True,
+ default=True,
+ ))
+
+ The `enabled_extensions` is an iterable of all the extensions that
+ autoescaping should be enabled for. Likewise `disabled_extensions` is
+ a list of all templates it should be disabled for. If a template is
+ loaded from a string then the default from `default_for_string` is used.
+ If nothing matches then the initial value of autoescaping is set to the
+ value of `default`.
+
+ For security reasons this function operates case insensitive.
+
+ .. versionadded:: 2.9
+ """
enabled_patterns = tuple("." + x.lstrip(".").lower() for x in enabled_extensions)
disabled_patterns = tuple("." + x.lstrip(".").lower() for x in disabled_extensions)
- def autoescape(template_name):
- if template_name is None:
- return default_for_string
- template_name = template_name.lower()
- if template_name.endswith(enabled_patterns):
- return True
- if template_name.endswith(disabled_patterns):
- return False
- return default
-
- return autoescape
-
-
-def htmlsafe_json_dumps(obj, dumper=None, **kwargs):
- """Works exactly like :func:`dumps` but is safe for use in ``<script>``
- tags. It accepts the same arguments and returns a JSON string. Note that
- this is available in templates through the ``|tojson`` filter which will
- also mark the result as safe. Due to how this function escapes certain
- characters this is safe even if used outside of ``<script>`` tags.
-
- The following characters are escaped in strings:
-
- - ``<``
- - ``>``
- - ``&``
- - ``'``
-
- This makes it safe to embed such strings in any place in HTML with the
- notable exception of double quoted attributes. In that case single
- quote your attributes or HTML escape it in addition.
- """
- if dumper is None:
- dumper = json.dumps
+ def autoescape(template_name):
+ if template_name is None:
+ return default_for_string
+ template_name = template_name.lower()
+ if template_name.endswith(enabled_patterns):
+ return True
+ if template_name.endswith(disabled_patterns):
+ return False
+ return default
+
+ return autoescape
+
+
+def htmlsafe_json_dumps(obj, dumper=None, **kwargs):
+ """Works exactly like :func:`dumps` but is safe for use in ``<script>``
+ tags. It accepts the same arguments and returns a JSON string. Note that
+ this is available in templates through the ``|tojson`` filter which will
+ also mark the result as safe. Due to how this function escapes certain
+ characters this is safe even if used outside of ``<script>`` tags.
+
+ The following characters are escaped in strings:
+
+ - ``<``
+ - ``>``
+ - ``&``
+ - ``'``
+
+ This makes it safe to embed such strings in any place in HTML with the
+ notable exception of double quoted attributes. In that case single
+ quote your attributes or HTML escape it in addition.
+ """
+ if dumper is None:
+ dumper = json.dumps
rv = (
dumper(obj, **kwargs)
.replace(u"<", u"\\u003c")
@@ -622,13 +622,13 @@ def htmlsafe_json_dumps(obj, dumper=None, **kwargs):
.replace(u"&", u"\\u0026")
.replace(u"'", u"\\u0027")
)
- return Markup(rv)
-
-
-class Cycler(object):
+ return Markup(rv)
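# Usage sketch for htmlsafe_json_dumps: a payload that would otherwise close
# a <script> block comes back with '<', '>', '&' and "'" escaped as \u003c,
# \u003e, \u0026 and \u0027. The payload below is illustrative only.
from jinja2.utils import htmlsafe_json_dumps

payload = {"title": "</script><script>alert('x')</script>"}
safe = htmlsafe_json_dumps(payload)
print(safe)  # a Markup string that can be embedded inside <script> tags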
+
+
+class Cycler(object):
"""Cycle through values by yield them one at a time, then restarting
once the end is reached. Available as ``cycler`` in templates.
-
+
Similar to ``loop.cycle``, but can be used outside loops or across
multiple loops. For example, render a list of folders and files in a
list, alternating giving them "odd" and "even" classes.
@@ -651,80 +651,80 @@ class Cycler(object):
.. versionadded:: 2.1
"""
- def __init__(self, *items):
- if not items:
+ def __init__(self, *items):
+ if not items:
raise RuntimeError("at least one item has to be provided")
- self.items = items
+ self.items = items
self.pos = 0
-
- def reset(self):
+
+ def reset(self):
"""Resets the current item to the first item."""
- self.pos = 0
-
- @property
- def current(self):
+ self.pos = 0
+
+ @property
+ def current(self):
"""Return the current item. Equivalent to the item that will be
returned next time :meth:`next` is called.
"""
- return self.items[self.pos]
-
- def next(self):
+ return self.items[self.pos]
+
+ def next(self):
"""Return the current item, then advance :attr:`current` to the
next item.
"""
- rv = self.current
- self.pos = (self.pos + 1) % len(self.items)
- return rv
-
- __next__ = next
-
-
-class Joiner(object):
- """A joining helper for templates."""
-
+ rv = self.current
+ self.pos = (self.pos + 1) % len(self.items)
+ return rv
+
+ __next__ = next
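# Usage sketch for Cycler outside of templates (inside templates it is
# available as ``cycler``). The item values are illustrative only.
from jinja2.utils import Cycler

row_class = Cycler("odd", "even")
for name in ("a", "b", "c"):
    print(name, row_class.next())  # odd, even, odd
print(row_class.current)           # "even" -- what the next call would return
row_class.reset()                  # start over at "odd"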
+
+
+class Joiner(object):
+ """A joining helper for templates."""
+
def __init__(self, sep=u", "):
- self.sep = sep
- self.used = False
-
- def __call__(self):
- if not self.used:
- self.used = True
+ self.sep = sep
+ self.used = False
+
+ def __call__(self):
+ if not self.used:
+ self.used = True
return u""
- return self.sep
-
-
-class Namespace(object):
- """A namespace object that can hold arbitrary attributes. It may be
+ return self.sep
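# Usage sketch for Joiner (exposed as ``joiner`` in templates): the first
# call returns an empty string and every later call returns the separator,
# so no leading separator is emitted.
from jinja2.utils import Joiner

sep = Joiner(", ")
out = "".join(sep() + word for word in ("red", "green", "blue"))
print(out)  # red, green, blue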
+
+
+class Namespace(object):
+ """A namespace object that can hold arbitrary attributes. It may be
initialized from a dictionary or with keyword arguments."""
-
+
def __init__(*args, **kwargs): # noqa: B902
- self, args = args[0], args[1:]
- self.__attrs = dict(*args, **kwargs)
-
- def __getattribute__(self, name):
+ self, args = args[0], args[1:]
+ self.__attrs = dict(*args, **kwargs)
+
+ def __getattribute__(self, name):
# __class__ is needed for the awaitable check in async mode
if name in {"_Namespace__attrs", "__class__"}:
- return object.__getattribute__(self, name)
- try:
- return self.__attrs[name]
- except KeyError:
- raise AttributeError(name)
-
- def __setitem__(self, name, value):
- self.__attrs[name] = value
-
- def __repr__(self):
+ return object.__getattribute__(self, name)
+ try:
+ return self.__attrs[name]
+ except KeyError:
+ raise AttributeError(name)
+
+ def __setitem__(self, name, value):
+ self.__attrs[name] = value
+
+ def __repr__(self):
return "<Namespace %r>" % self.__attrs
-
-
-# does this python version support async for in and async generators?
-try:
+
+
+# does this python version support async for in and async generators?
+try:
exec("async def _():\n async for _ in ():\n yield _")
- have_async_gen = True
-except SyntaxError:
- have_async_gen = False
-
-
+ have_async_gen = True
+except SyntaxError:
+ have_async_gen = False
+
+
def soft_unicode(s):
from markupsafe import soft_unicode
diff --git a/contrib/python/Jinja2/py2/jinja2/visitor.py b/contrib/python/Jinja2/py2/jinja2/visitor.py
index d1365bf10e..7e65ca1bc0 100644
--- a/contrib/python/Jinja2/py2/jinja2/visitor.py
+++ b/contrib/python/Jinja2/py2/jinja2/visitor.py
@@ -1,81 +1,81 @@
-# -*- coding: utf-8 -*-
+# -*- coding: utf-8 -*-
"""API for traversing the AST nodes. Implemented by the compiler and
meta introspection.
-"""
+"""
from .nodes import Node
-
-
-class NodeVisitor(object):
- """Walks the abstract syntax tree and call visitor functions for every
- node found. The visitor functions may return values which will be
- forwarded by the `visit` method.
-
-    By default the visitor functions for the nodes are ``'visit_'`` +
- class name of the node. So a `TryFinally` node visit function would
- be `visit_TryFinally`. This behavior can be changed by overriding
- the `get_visitor` function. If no visitor function exists for a node
- (return value `None`) the `generic_visit` visitor is used instead.
- """
-
- def get_visitor(self, node):
- """Return the visitor function for this node or `None` if no visitor
- exists for this node. In that case the generic visit function is
- used instead.
- """
+
+
+class NodeVisitor(object):
+ """Walks the abstract syntax tree and call visitor functions for every
+ node found. The visitor functions may return values which will be
+ forwarded by the `visit` method.
+
+    By default the visitor functions for the nodes are ``'visit_'`` +
+ class name of the node. So a `TryFinally` node visit function would
+ be `visit_TryFinally`. This behavior can be changed by overriding
+ the `get_visitor` function. If no visitor function exists for a node
+ (return value `None`) the `generic_visit` visitor is used instead.
+ """
+
+ def get_visitor(self, node):
+ """Return the visitor function for this node or `None` if no visitor
+ exists for this node. In that case the generic visit function is
+ used instead.
+ """
method = "visit_" + node.__class__.__name__
- return getattr(self, method, None)
-
- def visit(self, node, *args, **kwargs):
- """Visit a node."""
- f = self.get_visitor(node)
- if f is not None:
- return f(node, *args, **kwargs)
- return self.generic_visit(node, *args, **kwargs)
-
- def generic_visit(self, node, *args, **kwargs):
- """Called if no explicit visitor function exists for a node."""
- for node in node.iter_child_nodes():
- self.visit(node, *args, **kwargs)
-
-
-class NodeTransformer(NodeVisitor):
- """Walks the abstract syntax tree and allows modifications of nodes.
-
- The `NodeTransformer` will walk the AST and use the return value of the
- visitor functions to replace or remove the old node. If the return
- value of the visitor function is `None` the node will be removed
- from the previous location otherwise it's replaced with the return
- value. The return value may be the original node in which case no
- replacement takes place.
- """
-
- def generic_visit(self, node, *args, **kwargs):
- for field, old_value in node.iter_fields():
- if isinstance(old_value, list):
- new_values = []
- for value in old_value:
- if isinstance(value, Node):
- value = self.visit(value, *args, **kwargs)
- if value is None:
- continue
- elif not isinstance(value, Node):
- new_values.extend(value)
- continue
- new_values.append(value)
- old_value[:] = new_values
- elif isinstance(old_value, Node):
- new_node = self.visit(old_value, *args, **kwargs)
- if new_node is None:
- delattr(node, field)
- else:
- setattr(node, field, new_node)
- return node
-
- def visit_list(self, node, *args, **kwargs):
- """As transformers may return lists in some places this method
- can be used to enforce a list as return value.
- """
- rv = self.visit(node, *args, **kwargs)
- if not isinstance(rv, list):
- rv = [rv]
- return rv
+ return getattr(self, method, None)
+
+ def visit(self, node, *args, **kwargs):
+ """Visit a node."""
+ f = self.get_visitor(node)
+ if f is not None:
+ return f(node, *args, **kwargs)
+ return self.generic_visit(node, *args, **kwargs)
+
+ def generic_visit(self, node, *args, **kwargs):
+ """Called if no explicit visitor function exists for a node."""
+ for node in node.iter_child_nodes():
+ self.visit(node, *args, **kwargs)
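# Usage sketch for NodeVisitor: a small subclass (illustrative, not part of
# the library) that records every Name node referenced by a parsed template.
from jinja2 import Environment
from jinja2.visitor import NodeVisitor


class NameCollector(NodeVisitor):
    def __init__(self):
        self.names = []

    def visit_Name(self, node):
        # Record the name, then keep walking so nested nodes are not skipped.
        self.names.append(node.name)
        self.generic_visit(node)


env = Environment()
ast = env.parse("{{ user.name }} has {{ count }} items")
collector = NameCollector()
collector.visit(ast)
print(collector.names)  # ['user', 'count']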
+
+
+class NodeTransformer(NodeVisitor):
+ """Walks the abstract syntax tree and allows modifications of nodes.
+
+ The `NodeTransformer` will walk the AST and use the return value of the
+ visitor functions to replace or remove the old node. If the return
+ value of the visitor function is `None` the node will be removed
+ from the previous location otherwise it's replaced with the return
+ value. The return value may be the original node in which case no
+ replacement takes place.
+ """
+
+ def generic_visit(self, node, *args, **kwargs):
+ for field, old_value in node.iter_fields():
+ if isinstance(old_value, list):
+ new_values = []
+ for value in old_value:
+ if isinstance(value, Node):
+ value = self.visit(value, *args, **kwargs)
+ if value is None:
+ continue
+ elif not isinstance(value, Node):
+ new_values.extend(value)
+ continue
+ new_values.append(value)
+ old_value[:] = new_values
+ elif isinstance(old_value, Node):
+ new_node = self.visit(old_value, *args, **kwargs)
+ if new_node is None:
+ delattr(node, field)
+ else:
+ setattr(node, field, new_node)
+ return node
+
+ def visit_list(self, node, *args, **kwargs):
+ """As transformers may return lists in some places this method
+ can be used to enforce a list as return value.
+ """
+ rv = self.visit(node, *args, **kwargs)
+ if not isinstance(rv, list):
+ rv = [rv]
+ return rv
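# Usage sketch for NodeTransformer: an illustrative subclass that upper-cases
# every string constant in a parsed template's AST. Returning a new node from
# visit_Const replaces the old one; returning None would drop it entirely.
from jinja2 import Environment
from jinja2.nodes import Const
from jinja2.visitor import NodeTransformer


class UpperConstants(NodeTransformer):
    def visit_Const(self, node):
        if isinstance(node.value, str):
            return Const(node.value.upper(), lineno=node.lineno)
        return node


env = Environment()
ast = env.parse("{{ 'hello' ~ name }}")
ast = UpperConstants().visit(ast)
print(ast)  # the Const node in the tree now holds 'HELLO'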