"""Implements a Jinja / Python combination lexer. The ``Lexer`` class
is used to do some preprocessing. It filters out invalid operators like
the bitshift operators we don't allow in templates. It separates
template code and python code in expressions.
"""
import re
import typing as t
from ast import literal_eval
from collections import deque
from sys import intern

from ._identifier import pattern as name_re
from .exceptions import TemplateSyntaxError
from .utils import LRUCache

if t.TYPE_CHECKING:
    import typing_extensions as te

    from .environment import Environment

# cache for the lexers. Exists in order to be able to have multiple
# environments with the same lexer
_lexer_cache: t.MutableMapping[t.Tuple, "Lexer"] = LRUCache(50)  # type: ignore

# static regular expressions
whitespace_re = re.compile(r"\s+")
newline_re = re.compile(r"(\r\n|\r|\n)")
string_re = re.compile(
    r"('([^'\\]*(?:\\.[^'\\]*)*)'" r'|"([^"\\]*(?:\\.[^"\\]*)*)")', re.S
)
integer_re = re.compile(
    r"""
    (
        0b(_?[0-1])+ # binary
    |
        0o(_?[0-7])+ # octal
    |
        0x(_?[\da-f])+ # hex
    |
        [1-9](_?\d)* # decimal
    |
        0(_?0)* # decimal zero
    )
    """,
    re.IGNORECASE | re.VERBOSE,
)
float_re = re.compile(
    r"""
    (?<!\.)  # doesn't start with a .
    (\d+_)*\d+  # digits, possibly _ separated
    (
        (\.(\d+_)*\d+)?  # decimal part
        e[+\-]?(\d+_)*\d+  # exponent part
    |
        \.(\d+_)*\d+  # decimal part
    )
    """,
    re.IGNORECASE | re.VERBOSE,
)

# intern the token types and keep references to them
TOKEN_ADD = intern("add")
TOKEN_ASSIGN = intern("assign")
TOKEN_COLON = intern("colon")
TOKEN_COMMA = intern("comma")
TOKEN_DIV = intern("div")
TOKEN_DOT = intern("dot")
TOKEN_EQ = intern("eq")
TOKEN_FLOORDIV = intern("floordiv")
TOKEN_GT = intern("gt")
TOKEN_GTEQ = intern("gteq")
TOKEN_LBRACE = intern("lbrace")
TOKEN_LBRACKET = intern("lbracket")
TOKEN_LPAREN = intern("lparen")
TOKEN_LT = intern("lt")
TOKEN_LTEQ = intern("lteq")
TOKEN_MOD = intern("mod")
TOKEN_MUL = intern("mul")
TOKEN_NE = intern("ne")
TOKEN_PIPE = intern("pipe")
TOKEN_POW = intern("pow")
TOKEN_RBRACE = intern("rbrace")
TOKEN_RBRACKET = intern("rbracket")
TOKEN_RPAREN = intern("rparen")
TOKEN_SEMICOLON = intern("semicolon")
TOKEN_SUB = intern("sub")
TOKEN_TILDE = intern("tilde")
TOKEN_WHITESPACE = intern("whitespace")
TOKEN_FLOAT = intern("float")
TOKEN_INTEGER = intern("integer")
TOKEN_NAME = intern("name")
TOKEN_STRING = intern("string")
TOKEN_OPERATOR = intern("operator")
TOKEN_BLOCK_BEGIN = intern("block_begin")
TOKEN_BLOCK_END = intern("block_end")
TOKEN_VARIABLE_BEGIN = intern("variable_begin")
TOKEN_VARIABLE_END = intern("variable_end")
TOKEN_RAW_BEGIN = intern("raw_begin")
TOKEN_RAW_END = intern("raw_end")
TOKEN_COMMENT_BEGIN = intern("comment_begin")
TOKEN_COMMENT_END = intern("comment_end")
TOKEN_COMMENT = intern("comment")
TOKEN_LINESTATEMENT_BEGIN = intern("linestatement_begin")
TOKEN_LINESTATEMENT_END = intern("linestatement_end")
TOKEN_LINECOMMENT_BEGIN = intern("linecomment_begin")
TOKEN_LINECOMMENT_END = intern("linecomment_end")
TOKEN_LINECOMMENT = intern("linecomment")
TOKEN_DATA = intern("data")
TOKEN_INITIAL = intern("initial")
TOKEN_EOF = intern("eof")

# bind operators to token types
operators = {
    "+": TOKEN_ADD,
    "-": TOKEN_SUB,
    "/": TOKEN_DIV,
    "//": TOKEN_FLOORDIV,
    "*": TOKEN_MUL,
    "%": TOKEN_MOD,
    "**": TOKEN_POW,
    "~": TOKEN_TILDE,
    "[": TOKEN_LBRACKET,
    "]": TOKEN_RBRACKET,
    "(": TOKEN_LPAREN,
    ")": TOKEN_RPAREN,
    "{": TOKEN_LBRACE,
    "}": TOKEN_RBRACE,
    "==": TOKEN_EQ,
    "!=": TOKEN_NE,
    ">": TOKEN_GT,
    ">=": TOKEN_GTEQ,
    "<": TOKEN_LT,
    "<=": TOKEN_LTEQ,
    "=": TOKEN_ASSIGN,
    ".": TOKEN_DOT,
    ":": TOKEN_COLON,
    "|": TOKEN_PIPE,
    ",": TOKEN_COMMA,
    ";": TOKEN_SEMICOLON,
}

reverse_operators = {v: k for k, v in operators.items()}
assert len(operators) == len(reverse_operators), "operators dropped"
operator_re = re.compile(
    f"({'|'.join(re.escape(x) for x in sorted(operators, key=lambda x: -len(x)))})"
)

ignored_tokens = frozenset(
    [
        TOKEN_COMMENT_BEGIN,
        TOKEN_COMMENT,
        TOKEN_COMMENT_END,
        TOKEN_WHITESPACE,
        TOKEN_LINECOMMENT_BEGIN,
        TOKEN_LINECOMMENT_END,
        TOKEN_LINECOMMENT,
    ]
)
ignore_if_empty = frozenset(
    [TOKEN_WHITESPACE, TOKEN_DATA, TOKEN_COMMENT, TOKEN_LINECOMMENT]
)


def _describe_token_type(token_type: str) -> str:
    if token_type in reverse_operators:
        return reverse_operators[token_type]

    return {
        TOKEN_COMMENT_BEGIN: "begin of comment",
        TOKEN_COMMENT_END: "end of comment",
        TOKEN_COMMENT: "comment",
        TOKEN_LINECOMMENT: "comment",
        TOKEN_BLOCK_BEGIN: "begin of statement block",
        TOKEN_BLOCK_END: "end of statement block",
        TOKEN_VARIABLE_BEGIN: "begin of print statement",
        TOKEN_VARIABLE_END: "end of print statement",
        TOKEN_LINESTATEMENT_BEGIN: "begin of line statement",
        TOKEN_LINESTATEMENT_END: "end of line statement",
        TOKEN_DATA: "template data / text",
        TOKEN_EOF: "end of template",
    }.get(token_type, token_type)


def describe_token(token: "Token") -> str:
    """Returns a description of the token."""
    if token.type == TOKEN_NAME:
        return token.value

    return _describe_token_type(token.type)


def describe_token_expr(expr: str) -> str:
    """Like `describe_token` but for token expressions."""
    if ":" in expr:
        type, value = expr.split(":", 1)

        if type == TOKEN_NAME:
            return value
    else:
        type = expr

    return _describe_token_type(type)


def count_newlines(value: str) -> int:
    """Count the number of newline characters in the string. This is
    useful for extensions that filter a stream.
    """
    return len(newline_re.findall(value))


def compile_rules(environment: "Environment") -> t.List[t.Tuple[str, str]]:
    """Compiles all the rules from the environment into a list of rules."""
    e = re.escape
    rules = [
        (
            len(environment.comment_start_string),
            TOKEN_COMMENT_BEGIN,
            e(environment.comment_start_string),
        ),
        (
            len(environment.block_start_string),
            TOKEN_BLOCK_BEGIN,
            e(environment.block_start_string),
        ),
        (
            len(environment.variable_start_string),
            TOKEN_VARIABLE_BEGIN,
            e(environment.variable_start_string),
        ),
    ]

    if environment.line_statement_prefix is not None:
        rules.append(
            (
                len(environment.line_statement_prefix),
                TOKEN_LINESTATEMENT_BEGIN,
                r"^[ \t\v]*" + e(environment.line_statement_prefix),
            )
        )
    if environment.line_comment_prefix is not None:
        rules.append(
            (
                len(environment.line_comment_prefix),
                TOKEN_LINECOMMENT_BEGIN,
                r"(?:^|(?<=\S))[^\S\r\n]*" + e(environment.line_comment_prefix),
            )
        )

    # sort by length, longest prefix first, and drop the length again
    return [x[1:] for x in sorted(rules, reverse=True)]


class Failure:
    """Class that raises a `TemplateSyntaxError` if called.
    Used by the `Lexer` to specify known errors.
    """

    def __init__(
        self, message: str, cls: t.Type[TemplateSyntaxError] = TemplateSyntaxError
    ) -> None:
        self.message = message
        self.error_class = cls

    def __call__(self, lineno: int, filename: str) -> "te.NoReturn":
        raise self.error_class(self.message, lineno, filename)


class Token(t.NamedTuple):
    lineno: int
    type: str
    value: str

    def __str__(self) -> str:
        return describe_token(self)

    def test(self, expr: str) -> bool:
        """Test a token against a token expression. This can either be a
        token type or ``'token_type:token_value'``. This can only test
        against string values and types.
        """
        if self.type == expr:
            return True

        if ":" in expr:
            return expr.split(":", 1) == [self.type, self.value]

        return False

    def test_any(self, *iterable: str) -> bool:
        """Test against multiple token expressions."""
        return any(self.test(expr) for expr in iterable)


class TokenStreamIterator:
    """The iterator for tokenstreams. Iterate over the stream
    until the eof token is reached.
    """

    def __init__(self, stream: "TokenStream") -> None:
        self.stream = stream

    def __iter__(self) -> "TokenStreamIterator":
        return self

    def __next__(self) -> Token:
        token = self.stream.current

        if token.type is TOKEN_EOF:
            self.stream.close()
            raise StopIteration

        next(self.stream)
        return token
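# Illustrative sketch (ours, not part of the original module): how the token
# helpers above behave. `_demo_token_tests` is a hypothetical name added only
# for illustration; nothing in the lexer calls it.
def _demo_token_tests() -> None:
    tok = Token(1, TOKEN_NAME, "foo")
    assert tok.test("name")                    # bare token type matches
    assert tok.test("name:foo")                # "type:value" expression matches
    assert tok.test_any("string", "name:foo")  # any one expression may match
    assert describe_token(tok) == "foo"        # name tokens are described by value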
class TokenStream:
    """A token stream is an iterable that yields :class:`Token`\\s. The
    parser however does not iterate over it but calls :meth:`next` to go
    one token ahead. The current active token is stored as :attr:`current`.
    """

    def __init__(
        self,
        generator: t.Iterable[Token],
        name: t.Optional[str],
        filename: t.Optional[str],
    ):
        self._iter = iter(generator)
        self._pushed: "te.Deque[Token]" = deque()
        self.name = name
        self.filename = filename
        self.closed = False
        self.current = Token(1, TOKEN_INITIAL, "")
        next(self)

    def __iter__(self) -> TokenStreamIterator:
        return TokenStreamIterator(self)

    def __bool__(self) -> bool:
        return bool(self._pushed) or self.current.type is not TOKEN_EOF

    @property
    def eos(self) -> bool:
        """Are we at the end of the stream?"""
        return not self

    def push(self, token: Token) -> None:
        """Push a token back to the stream."""
        self._pushed.append(token)

    def look(self) -> Token:
        """Look at the next token."""
        old_token = next(self)
        result = self.current
        self.push(result)
        self.current = old_token
        return result

    def skip(self, n: int = 1) -> None:
        """Go n tokens ahead."""
        for _ in range(n):
            next(self)

    def next_if(self, expr: str) -> t.Optional[Token]:
        """Perform the token test and return the token if it matched.
        Otherwise the return value is `None`.
        """
        if self.current.test(expr):
            return next(self)

        return None

    def skip_if(self, expr: str) -> bool:
        """Like :meth:`next_if` but only returns `True` or `False`."""
        return self.next_if(expr) is not None

    def __next__(self) -> Token:
        """Go one token ahead and return the old one.

        Use the built-in :func:`next` instead of calling this directly.
        """
        rv = self.current

        if self._pushed:
            self.current = self._pushed.popleft()
        elif self.current.type is not TOKEN_EOF:
            try:
                self.current = next(self._iter)
            except StopIteration:
                self.close()

        return rv

    def close(self) -> None:
        """Close the stream."""
        self.current = Token(self.current.lineno, TOKEN_EOF, "")
        self._iter = iter(())
        self.closed = True

    def expect(self, expr: str) -> Token:
        """Expect a given token type and return it. This accepts the same
        argument as :meth:`jinja2.lexer.Token.test`.
        """
        if not self.current.test(expr):
            expr = describe_token_expr(expr)

            if self.current.type is TOKEN_EOF:
                raise TemplateSyntaxError(
                    f"unexpected end of template, expected {expr!r}.",
                    self.current.lineno,
                    self.name,
                    self.filename,
                )

            raise TemplateSyntaxError(
                f"expected token {expr!r}, got {describe_token(self.current)!r}",
                self.current.lineno,
                self.name,
                self.filename,
            )

        return next(self)


def get_lexer(environment: "Environment") -> "Lexer":
    """Return a lexer which is probably cached."""
    key = (
        environment.block_start_string,
        environment.block_end_string,
        environment.variable_start_string,
        environment.variable_end_string,
        environment.comment_start_string,
        environment.comment_end_string,
        environment.line_statement_prefix,
        environment.line_comment_prefix,
        environment.trim_blocks,
        environment.lstrip_blocks,
        environment.newline_sequence,
        environment.keep_trailing_newline,
    )
    lexer = _lexer_cache.get(key)

    if lexer is None:
        _lexer_cache[key] = lexer = Lexer(environment)

    return lexer


class OptionalLStrip(tuple):
    """A special tuple for marking a point in the state that can have
    lstrip applied.
    """

    __slots__ = ()

    # Even though it looks like a no-op, creating instances fails
    # without this.
    def __new__(cls, *members, **kwargs):  # type: ignore
        return super().__new__(cls, members)


class _Rule(t.NamedTuple):
    pattern: t.Pattern[str]
    tokens: t.Union[str, t.Tuple[str, ...], t.Tuple[Failure]]
    command: t.Optional[str]
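# Illustrative sketch (ours, not part of the original module): the stream
# supports one-token lookahead via `look`, conditional consumption via
# `next_if` / `skip_if`, and assertion via `expect`. `_demo_token_stream`
# is a hypothetical name added only for illustration.
def _demo_token_stream() -> None:
    tokens = [
        Token(1, TOKEN_NAME, "foo"),
        Token(1, TOKEN_ASSIGN, "="),
        Token(1, TOKEN_INTEGER, "42"),
    ]
    stream = TokenStream(iter(tokens), "<demo>", None)
    assert stream.current.test("name:foo")
    assert stream.look().test("assign")  # peek without consuming
    stream.expect("name")                # consume `foo` or raise
    assert stream.skip_if("assign")      # consume `=` if present
    assert next(stream).value == "42"    # afterwards the stream sits on eof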
class Lexer:
    """Class that implements a lexer for a given environment. Automatically
    created by the environment class, usually you don't have to do that.

    Note that the lexer is not automatically bound to an environment.
    Multiple environments can share the same lexer.
    """

    def __init__(self, environment: "Environment") -> None:
        # shortcuts
        e = re.escape

        def c(x: str) -> t.Pattern[str]:
            return re.compile(x, re.M | re.S)

        # lexing rules for tags
        tag_rules: t.List[_Rule] = [
            _Rule(whitespace_re, TOKEN_WHITESPACE, None),
            _Rule(float_re, TOKEN_FLOAT, None),
            _Rule(integer_re, TOKEN_INTEGER, None),
            _Rule(name_re, TOKEN_NAME, None),
            _Rule(string_re, TOKEN_STRING, None),
            _Rule(operator_re, TOKEN_OPERATOR, None),
        ]

        # assemble the root lexing rule. because "|" is ungreedy
        # we have to sort by length so that the lexer continues working
        # as expected when we have parsing rules like <% for block and
        # <%= for variables.
        root_tag_rules = compile_rules(environment)

        block_start_re = e(environment.block_start_string)
        block_end_re = e(environment.block_end_string)
        comment_end_re = e(environment.comment_end_string)
        variable_end_re = e(environment.variable_end_string)

        # block suffix if trimming is enabled
        block_suffix_re = "\\n?" if environment.trim_blocks else ""

        self.lstrip_unless_re = c(r"[^ \t]") if environment.lstrip_blocks else None

        self.newline_sequence = environment.newline_sequence
        self.keep_trailing_newline = environment.keep_trailing_newline

        root_raw_re = (
            fr"(?P<raw_begin>{block_start_re}(\-|\+|)\s*raw\s*"
            fr"(?:\-{block_end_re}\s*|{block_end_re}))"
        )
        root_parts_re = "|".join(
            [root_raw_re] + [fr"(?P<{n}>{r}(\-|\+|))" for n, r in root_tag_rules]
        )

        # global lexing rules
        self.rules: t.Dict[str, t.List[_Rule]] = {
            "root": [
                # directives
                _Rule(
                    c(fr"(.*?)(?:{root_parts_re})"),
                    OptionalLStrip(TOKEN_DATA, "#bygroup"),  # type: ignore
                    "#bygroup",
                ),
                # data
                _Rule(c(".+"), TOKEN_DATA, None),
            ],
            # comments
            TOKEN_COMMENT_BEGIN: [
                _Rule(
                    c(
                        fr"(.*?)((?:\+{comment_end_re}|\-{comment_end_re}\s*"
                        fr"|{comment_end_re}{block_suffix_re}))"
                    ),
                    (TOKEN_COMMENT, TOKEN_COMMENT_END),
                    "#pop",
                ),
                _Rule(c(r"(.)"), (Failure("Missing end of comment tag"),), None),
            ],
            # blocks
            TOKEN_BLOCK_BEGIN: [
                _Rule(
                    c(
                        fr"(?:\+{block_end_re}|\-{block_end_re}\s*"
                        fr"|{block_end_re}{block_suffix_re})"
                    ),
                    TOKEN_BLOCK_END,
                    "#pop",
                ),
            ]
            + tag_rules,
            # variables
            TOKEN_VARIABLE_BEGIN: [
                _Rule(
                    c(fr"\-{variable_end_re}\s*|{variable_end_re}"),
                    TOKEN_VARIABLE_END,
                    "#pop",
                )
            ]
            + tag_rules,
            # raw block
            TOKEN_RAW_BEGIN: [
                _Rule(
                    c(
                        fr"(.*?)((?:{block_start_re}(\-|\+|))\s*endraw\s*"
                        fr"(?:\+{block_end_re}|\-{block_end_re}\s*"
                        fr"|{block_end_re}{block_suffix_re}))"
                    ),
                    OptionalLStrip(TOKEN_DATA, TOKEN_RAW_END),  # type: ignore
                    "#pop",
                ),
                _Rule(c(r"(.)"), (Failure("Missing end of raw directive"),), None),
            ],
            # line statements
            TOKEN_LINESTATEMENT_BEGIN: [
                _Rule(c(r"\s*(\n|$)"), TOKEN_LINESTATEMENT_END, "#pop")
            ]
            + tag_rules,
            # line comments
            TOKEN_LINECOMMENT_BEGIN: [
                _Rule(
                    c(r"(.*?)()(?=\n|$)"),
                    (TOKEN_LINECOMMENT, TOKEN_LINECOMMENT_END),
                    "#pop",
                )
            ],
        }

    def _normalize_newlines(self, value: str) -> str:
        """Replace all newlines with the configured sequence in strings
        and template data.
        """
        return newline_re.sub(self.newline_sequence, value)

    def tokenize(
        self,
        source: str,
        name: t.Optional[str] = None,
        filename: t.Optional[str] = None,
        state: t.Optional[str] = None,
    ) -> TokenStream:
        """Calls tokeniter + wrap and wraps the result in a token stream."""
        stream = self.tokeniter(source, name, filename, state)
        return TokenStream(self.wrap(stream, name, filename), name, filename)

    def wrap(
        self,
        stream: t.Iterable[t.Tuple[int, str, str]],
        name: t.Optional[str] = None,
        filename: t.Optional[str] = None,
    ) -> t.Iterator[Token]:
        """This is called with the stream as returned by `tokenize` and wraps
        every token in a :class:`Token` and converts the value.
        """
        for lineno, token, value_str in stream:
            if token in ignored_tokens:
                continue

            value: t.Any = value_str

            if token == TOKEN_LINESTATEMENT_BEGIN:
                token = TOKEN_BLOCK_BEGIN
            elif token == TOKEN_LINESTATEMENT_END:
                token = TOKEN_BLOCK_END
            # we are not interested in those tokens in the parser
            elif token in (TOKEN_RAW_BEGIN, TOKEN_RAW_END):
                continue
            elif token == TOKEN_DATA:
                value = self._normalize_newlines(value_str)
            elif token == "keyword":
                token = value_str
            elif token == TOKEN_NAME:
                value = value_str

                if not value.isidentifier():
                    raise TemplateSyntaxError(
                        "Invalid character in identifier", lineno, name, filename
                    )
            elif token == TOKEN_STRING:
                # try to unescape string
                try:
                    value = (
                        self._normalize_newlines(value_str[1:-1])
                        .encode("ascii", "backslashreplace")
                        .decode("unicode-escape")
                    )
                except Exception as e:
                    msg = str(e).split(":")[-1].strip()
                    raise TemplateSyntaxError(msg, lineno, name, filename) from e
            elif token == TOKEN_INTEGER:
                value = int(value_str.replace("_", ""), 0)
            elif token == TOKEN_FLOAT:
                # remove all "_" first to support more Python versions
                value = literal_eval(value_str.replace("_", ""))
            elif token == TOKEN_OPERATOR:
                token = operators[value_str]

            yield Token(lineno, token, value)
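    # Illustrative note (ours, not in the original source): `tokenize` is the
    # public entry point. It drives `tokeniter` below, which walks
    # `self.rules` as a stack of lexer states ("root", "block_begin",
    # "variable_begin", ...), and `wrap` converts the raw
    # (lineno, token, value) triples into `Token` instances. For example,
    # tokenizing "{{ 1 + 2 }}" yields roughly:
    #
    #     variable_begin "{{", integer 1, add "+", integer 2, variable_end "}}"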
    def tokeniter(
        self,
        source: str,
        name: t.Optional[str],
        filename: t.Optional[str] = None,
        state: t.Optional[str] = None,
    ) -> t.Iterator[t.Tuple[int, str, str]]:
        """This method tokenizes the text and returns the tokens in a
        generator. Use this method if you just want to tokenize a template.

        .. versionchanged:: 3.0
            Only ``\\n``, ``\\r\\n`` and ``\\r`` are treated as line
            breaks.
        """
        lines = newline_re.split(source)[::2]

        if not self.keep_trailing_newline and lines[-1] == "":
            del lines[-1]

        source = "\n".join(lines)
        pos = 0
        lineno = 1
        stack = ["root"]

        if state is not None and state != "root":
            assert state in ("variable", "block"), "invalid state"
            stack.append(state + "_begin")

        statetokens = self.rules[stack[-1]]
        source_length = len(source)
        balancing_stack: t.List[str] = []
        lstrip_unless_re = self.lstrip_unless_re
        newlines_stripped = 0
        line_starting = True

        while True:
            # tokenizer loop
            for regex, tokens, new_state in statetokens:
                m = regex.match(source, pos)

                # if no match we try again with the next rule
                if m is None:
                    continue

                # we only match blocks and variables if braces / parentheses
                # are balanced. continue parsing with the lower rule which
                # is the operator rule. do this only if the end tags look
                # like operators
                if balancing_stack and tokens in (
                    TOKEN_VARIABLE_END,
                    TOKEN_BLOCK_END,
                    TOKEN_LINESTATEMENT_END,
                ):
                    continue

                # tuples support more options
                if isinstance(tokens, tuple):
                    groups: t.Sequence[str] = m.groups()

                    if isinstance(tokens, OptionalLStrip):
                        # Rule supports lstrip. Match will look like
                        # text + block or variable begin tag.
                        text = groups[0]
                        # Skipping the text and first match group, every
                        # other group is the whitespace control for each tag.
                        strip_sign = next(g for g in groups[2::2] if g is not None)

                        if strip_sign == "-":
                            # Strip all whitespace between the text and the tag.
                            stripped = text.rstrip()
                            newlines_stripped = text[len(stripped) :].count("\n")
                            groups = [stripped, *groups[1:]]
                        elif (
                            # Not marked for preserving whitespace.
                            strip_sign != "+"
                            # lstrip is enabled.
                            and lstrip_unless_re is not None
                            # Not a variable expression.
                            and not m.groupdict().get(TOKEN_VARIABLE_BEGIN)
                        ):
                            # The start of text between the last newline and the tag.
                            l_pos = text.rfind("\n") + 1

                            if l_pos > 0 or line_starting:
                                # If there's only whitespace between the newline
                                # and the tag, strip it.
                                if not lstrip_unless_re.search(text, l_pos):
                                    groups = [text[:l_pos], *groups[1:]]

                    for idx, token in enumerate(tokens):
                        # failure group
                        if token.__class__ is Failure:
                            raise token(lineno, filename)
                        # bygroup is a bit more complex, in that case we
                        # yield for the current token the first named
                        # group that matched
                        elif token == "#bygroup":
                            for key, value in m.groupdict().items():
                                if value is not None:
                                    yield lineno, key, value
                                    lineno += value.count("\n")
                                    break
                            else:
                                raise RuntimeError(
                                    f"{regex!r} wanted to resolve the token"
                                    " dynamically but no group matched"
                                )
                        # normal group
                        else:
                            data = groups[idx]

                            if data or token not in ignore_if_empty:
                                yield lineno, token, data

                            lineno += data.count("\n") + newlines_stripped
                            newlines_stripped = 0

                # strings as token just are yielded as-is
                else:
                    data = m.group()

                    # update brace/parentheses balance
                    if tokens == TOKEN_OPERATOR:
                        if data == "{":
                            balancing_stack.append("}")
                        elif data == "(":
                            balancing_stack.append(")")
                        elif data == "[":
                            balancing_stack.append("]")
                        elif data in ("}", ")", "]"):
                            if not balancing_stack:
                                raise TemplateSyntaxError(
                                    f"unexpected '{data}'", lineno, name, filename
                                )

                            expected_op = balancing_stack.pop()

                            if expected_op != data:
                                raise TemplateSyntaxError(
                                    f"unexpected '{data}', expected '{expected_op}'",
                                    lineno,
                                    name,
                                    filename,
                                )

                    # yield items
                    if data or tokens not in ignore_if_empty:
                        yield lineno, tokens, data

                    lineno += data.count("\n")

                line_starting = m.group()[-1:] == "\n"
                # fetch new position into new variable so that we can check
                # if there is an internal parsing error which would result
                # in an infinite loop
                pos2 = m.end()

                # handle state changes
                if new_state is not None:
                    # remove the uppermost state
                    if new_state == "#pop":
                        stack.pop()
                    # resolve the new state by group checking
                    elif new_state == "#bygroup":
                        for key, value in m.groupdict().items():
                            if value is not None:
                                stack.append(key)
                                break
                        else:
                            raise RuntimeError(
                                f"{regex!r} wanted to resolve the new state"
                                " dynamically but no group matched"
                            )
                    # direct state name given
                    else:
                        stack.append(new_state)

                    statetokens = self.rules[stack[-1]]
                # we are still at the same position and no stack change.
                # this means a loop without break condition, avoid that and
                # raise error
                elif pos2 == pos:
                    raise RuntimeError(
                        f"{regex!r} yielded empty string without stack change"
                    )

                # publish new position and start again
                pos = pos2
                break
            # if the loop terminated without break we haven't found a single
            # match: either we are at the end of the file or we have a problem
            else:
                # end of text
                if pos >= source_length:
                    return

                # something went wrong
                raise TemplateSyntaxError(
                    f"unexpected char {source[pos]!r} at {pos}", lineno, name, filename
                )
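# ---------------------------------------------------------------------------
# Illustrative usage (ours, not part of the original module): a minimal sketch
# of the public entry points above, assuming a default-configured Environment
# and that the package is importable (e.g. run via `python -m jinja2.lexer`).
if __name__ == "__main__":
    from jinja2 import Environment

    env = Environment()
    lexer = get_lexer(env)  # cached per configuration key
    for token in lexer.tokenize("Hello {{ name }}!", name="<demo>"):
        # prints e.g.:  1 data 'Hello '  /  1 variable_begin '{{'  /
        #               1 name 'name'    /  1 variable_end '}}'   /  1 data '!'
        print(token.lineno, token.type, repr(token.value))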