
import token
import tokenize
from typing import Dict, Iterator, List

Mark = int

exact_token_types = token.EXACT_TOKEN_TYPES


def shorttok(tok: tokenize.TokenInfo) -> str:
    return "%-25.25s" % f"{tok.start[0]}.{tok.start[1]}: {token.tok_name[tok.type]}:{tok.string!r}"


class Tokenizer:
    """Caching wrapper for the tokenize module.

    This is pretty tied to Python's syntax.
    """

    _tokens: List[tokenize.TokenInfo]

    def __init__(
        self, tokengen: Iterator[tokenize.TokenInfo], *, path: str = "", verbose: bool = False
    ):
        self._tokengen = tokengen
        self._tokens: List[tokenize.TokenInfo] = []
        self._index = 0
        self._verbose = verbose
        self._lines: Dict[int, str] = {}
        self._path = path
        if verbose:
            self.report(False, False)

    def getnext(self) -> tokenize.TokenInfo:
        """Return the next token and update the index."""
        cached = not self._index == len(self._tokens)
        tok = self.peek()
        self._index += 1
        if self._verbose:
            self.report(cached, False)
        return tok

    def peek(self) -> tokenize.TokenInfo:
        """Return the next token *without* updating the index."""
        # Pull tokens from the underlying generator until the cache reaches
        # self._index, skipping NL, comments, whitespace-only ERRORTOKENs,
        # and consecutive NEWLINEs.
        while self._index == len(self._tokens):
            tok = next(self._tokengen)
            if tok.type in (tokenize.NL, tokenize.COMMENT):
                continue
            if tok.type == token.ERRORTOKEN and tok.string.isspace():
                continue
            if (
                tok.type == token.NEWLINE
                and self._tokens
                and self._tokens[-1].type == token.NEWLINE
            ):
                continue
            self._tokens.append(tok)
            if not self._path:
                self._lines[tok.start[0]] = tok.line
        return self._tokens[self._index]

    def diagnose(self) -> tokenize.TokenInfo:
        if not self._tokens:
            self.getnext()
        return self._tokens[-1]

    def get_last_non_whitespace_token(self) -> tokenize.TokenInfo:
        for tok in reversed(self._tokens[: self._index]):
            if tok.type != tokenize.ENDMARKER and (
                tok.type < tokenize.NEWLINE or tok.type > tokenize.DEDENT
            ):
                break
        return tok

    def get_lines(self, line_numbers: List[int]) -> List[str]:
        """Retrieve source lines corresponding to line numbers."""
        if self._lines:
            lines = self._lines
        else:
            # Lines were not captured during tokenization; read them from the file.
            n = len(line_numbers)
            lines = {}
            count = 0
            seen = 0
            with open(self._path) as f:
                for l in f:
                    count += 1
                    if count in line_numbers:
                        seen += 1
                        lines[count] = l
                        if seen == n:
                            break

        return [lines[n] for n in line_numbers]

    def mark(self) -> Mark:
        return self._index

    def reset(self, index: Mark) -> None:
        if index == self._index:
            return
        assert 0 <= index <= len(self._tokens), (index, len(self._tokens))
        old_index = self._index
        self._index = index
        if self._verbose:
            self.report(True, index < old_index)

    def report(self, cached: bool, back: bool) -> None:
        # One '-' per consumed token, then a marker: '-' backtrack, '>' cached, '*' fresh.
        if back:
            fill = "-" * self._index + "-"
        elif cached:
            fill = "-" * self._index + ">"
        else:
            fill = "-" * self._index + "*"
        if self._index == 0:
            print(f"{fill} (Bof)")
        else:
            tok = self._tokens[self._index - 1]
            print(f"{fill} {shorttok(tok)}")