https://t.me/AnonymousX5
Server : Apache
System : Linux cvar2.toservers.com 3.10.0-962.3.2.lve1.5.73.el7.x86_64 #1 SMP Wed Aug 24 21:31:23 UTC 2022 x86_64
User : njnconst (1116)
PHP Version : 8.4.18
Disabled Functions : NONE
Directory : /lib/python2.7/site-packages/jinja2/

Upload File :
current_dir [ Writeable ] document_root [ Writeable ]

 

Current File : /lib/python2.7/site-packages/jinja2/lexer.pyc
[Binary data: Python 2.7 compiled bytecode (.pyc) of Jinja2's lexer module, marshalled from the pip build path /tmp/pip-install-sTXtzD/Jinja2/jinja2/lexer.py. The recoverable module docstring reads: "Implements a Jinja / Python combination lexer. The ``Lexer`` class is used to do some preprocessing. It filters out invalid operators like the bitshift operators we don't allow in templates. It separates template code and python code in expressions." The embedded names show the module defining the TOKEN_* token-type constants and the operators table, the helpers describe_token, describe_token_expr, count_newlines, compile_rules, and get_lexer, and the classes Failure, Token, TokenStreamIterator, TokenStream, OptionalLStrip, and Lexer. The rest of the file is non-text bytecode.]
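For reference, the tokenizer compiled into this file is reachable through Jinja2's public Environment.lex() API, which yields (lineno, token_type, token_value) tuples. A minimal sketch, assuming any Jinja2 install that ships this lexer (the template string is only illustrative):

    # Minimal sketch: inspect the token stream produced by the lexer above.
    # Environment.lex() yields (lineno, token_type, token_value) tuples,
    # including whitespace tokens, before any parsing happens.
    from jinja2 import Environment

    env = Environment()
    for lineno, token_type, value in env.lex("Hello {{ name }}!"):
        print("%d %s %r" % (lineno, token_type, value))

On a default Environment this prints roughly: a data token for "Hello ", the variable_begin delimiter, whitespace, a name token for "name", whitespace, variable_end, and a final data token for "!", matching the token-type constants listed above.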

https://t.me/AnonymousX5 - 2025