Directory: /usr/local/lib/python3.8/lib2to3/pgen2/__pycache__/
Current file: /usr/local/lib/python3.8/lib2to3/pgen2/__pycache__/tokenize.cpython-38.pyc (compiled from lib2to3/pgen2/tokenize.py)
"""Tokenization help for Python programs.

generate_tokens(readline) is a generator that breaks a stream of
text into Python tokens.  It accepts a readline-like method which is called
repeatedly to get the next line of input (or "" for EOF).  It generates
5-tuples with these members:

    the token type (see token.py)
    the token (a string)
    the starting (row, column) indices of the token (a 2-tuple of ints)
    the ending (row, column) indices of the token (a 2-tuple of ints)
    the original line (string)

It is designed to match the working of the Python tokenizer exactly, except
that it produces COMMENT tokens for comments and gives type OP for all
operators

Older entry points
    tokenize_loop(readline, tokeneater)
    tokenize(readline, tokeneater=printtoken)
are the same, except instead of generating tokens, tokeneater is a callback
function to which the 5 fields described above are passed as 5 arguments,
each time a new token is found."""

__author__ = 'Ka-Ping Yee <ping@lfw.org>'
__credits__ = \
    'GvR, ESR, Tim Peters, Thomas Wouters, Fred Drake, Skip Montanaro'

import string, re
from codecs import BOM_UTF8, lookup
from lib2to3.pgen2.token import *

from . import token
__all__ = [x for x in dir(token) if x[0] != '_'] + \
           ["tokenize", "generate_tokens", "untokenize"]
del token

try:
    bytes
except NameError:
    # Support bytes type in Python <= 2.5, so 2to3 turns itself into
    # valid Python 3 code.
    bytes = str

def group(*choices): return '(' + '|'.join(choices) + ')'
def any(*choices): return group(*choices) + '*'
def maybe(*choices): return group(*choices) + '?'
def _combinations(*l):
    return set(
        x + y for x in l for y in l + ("",) if x.casefold() != y.casefold()
    )

Whitespace = r'[ \f\t]*'
Comment = r'#[^\r\n]*'
Ignore = Whitespace + any(r'\\\r?\n' + Whitespace) + maybe(Comment)
Name = r'\w+'

Binnumber = r'0[bB]_?[01]+(?:_[01]+)*'
Hexnumber = r'0[xX]_?[\da-fA-F]+(?:_[\da-fA-F]+)*[lL]?'
Octnumber = r'0[oO]?_?[0-7]+(?:_[0-7]+)*[lL]?'
Decnumber = group(r'[1-9]\d*(?:_\d+)*[lL]?', r'0[lL]?')
Intnumber = group(Binnumber, Hexnumber, Octnumber, Decnumber)
Exponent = r'[eE][-+]?\d+(?:_\d+)*'
Pointfloat = group(r'\d+(?:_\d+)*\.(?:\d+(?:_\d+)*)?', r'\.\d+(?:_\d+)*') + maybe(Exponent)
Expfloat = r'\d+(?:_\d+)*' + Exponent
Floatnumber = group(Pointfloat, Expfloat)
Imagnumber = group(r'\d+(?:_\d+)*[jJ]', Floatnumber + r'[jJ]')
Number = group(Imagnumber, Floatnumber, Intnumber)

# Tail end of ' string.
Single = r"[^'\\]*(?:\\.[^'\\]*)*'"
# Tail end of " string.
Double = r'[^"\\]*(?:\\.[^"\\]*)*"'
# Tail end of ''' string.
Single3 = r"[^'\\]*(?:(?:\\.|'(?!''))[^'\\]*)*'''"
# Tail end of """ string.
Double3 = r'[^"\\]*(?:(?:\\.|"(?!""))[^"\\]*)*"""'
_litprefix = r"(?:[uUrRbBfF]|[rR][fFbB]|[fFbBuU][rR])?"
Triple = group(_litprefix + "'''", _litprefix + '"""')
# Single-line ' or " string.
String = group(_litprefix + r"'[^\n'\\]*(?:\\.[^\n'\\]*)*'",
               _litprefix + r'"[^\n"\\]*(?:\\.[^\n"\\]*)*"')

# Because of leftmost-then-longest match semantics, be sure to put the
# longest operators first (e.g., if = came before ==, == would get
# recognized as two instances of =).
Operator = group(r"\*\*=?", r">>=?", r"<<=?", r"<>", r"!=",
                 r"//=?", r"->",
                 r"[+\-*/%&@|^=<>]=?",
                 r"~")

Bracket = '[][(){}]'
Special = group(r'\r?\n', r':=', r'[:;.,`@]')
Funny = group(Operator, Bracket, Special)

PlainToken = group(Number, Funny, String, Name)
Token = Ignore + PlainToken

# First (or only) line of ' or " string.
ContStr = group(_litprefix + r"'[^\n'\\]*(?:\\.[^\n'\\]*)*" +
                group("'", r'\\\r?\n'),
                _litprefix + r'"[^\n"\\]*(?:\\.[^\n"\\]*)*' +
                group('"', r'\\\r?\n'))
PseudoExtras = group(r'\\\r?\n', Comment, Triple)
PseudoToken = Whitespace + group(PseudoExtras, Number, Funny, ContStr, Name)

tokenprog, pseudoprog, single3prog, double3prog = map(
    re.compile, (Token, PseudoToken, Single3, Double3))

_strprefixes = (
    _combinations('r', 'R', 'f', 'F') |
    _combinations('r', 'R', 'b', 'B') |
    {'u', 'U', 'ur', 'uR', 'Ur', 'UR'}
)

endprogs = {"'": re.compile(Single), '"': re.compile(Double),
            "'''": single3prog, '"""': double3prog,
            **{f"{prefix}'''": single3prog for prefix in _strprefixes},
            **{f'{prefix}"""': double3prog for prefix in _strprefixes},
            **{prefix: None for prefix in _strprefixes}}

triple_quoted = (
    {"'''", '"""'} |
    {f"{prefix}'''" for prefix in _strprefixes} |
    {f'{prefix}"""' for prefix in _strprefixes}
)
single_quoted = (
    {"'", '"'} |
    {f"{prefix}'" for prefix in _strprefixes} |
    {f'{prefix}"' for prefix in _strprefixes}
)

tabsize = 8

class TokenError(Exception): pass

class StopTokenizing(Exception): pass

# printtoken(), tokenize(), tokenize_loop(), the Untokenizer class,
# _get_normal_name(), detect_encoding(), untokenize() and generate_tokens()
# follow here in the full module, which ends with:

if __name__ == '__main__':                     # testing
    import sys
    if len(sys.argv) > 1: tokenize(open(sys.argv[1]).readline)
    else: tokenize(sys.stdin.readline)
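A quick way to see how the group()/any()/maybe() helpers compose into the patterns above is to exercise two of the resulting objects directly. This is only an illustrative sketch: it assumes the module-level names Number and pseudoprog are importable from lib2to3.pgen2.tokenize (as they are in the 3.8 layout shown here), and the sample literals are made up.

import re
from lib2to3.pgen2.tokenize import Number, pseudoprog

# Number is group(Imagnumber, Floatnumber, Intnumber) and accepts
# PEP 515 underscore separators as well as imaginary literals.
number_re = re.compile(Number)
for literal in ("0b1010_1010", "0x_ff", "1_000_000", "3.14e-10j"):
    assert number_re.match(literal)

# pseudoprog (the compiled PseudoToken) picks off one token at a time:
# the Whitespace prefix consumes the indentation and capture group 1
# holds the token text.
m = pseudoprog.match("   spam = 42\n")
print(m.span(1), repr(m.group(1)))  # -> (3, 7) 'spam'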
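The docstring names generate_tokens(readline) as the main entry point. The following minimal usage sketch assumes a Python build that still ships lib2to3 (such as the 3.8 installation this cache file belongs to); the sample source string is invented for illustration.

import io
from lib2to3.pgen2 import tokenize as pgen2_tokenize
from lib2to3.pgen2.token import tok_name

source = "x = 1 + 2  # add two constants\n"
readline = io.StringIO(source).readline

# Each yielded item is the 5-tuple described in the docstring:
# (type, string, (start_row, start_col), (end_row, end_col), line)
for tok_type, tok_str, start, end, line in pgen2_tokenize.generate_tokens(readline):
    print(tok_name[tok_type], repr(tok_str), start, end)

As described above, the trailing comment is reported as a COMMENT token, and both '=' and '+' come back with type OP.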