
"""Better tokenizing for coverage.py."""

from __future__ import annotations

import ast
import io
import keyword
import re
import sys
import token
import tokenize

from collections.abc import Iterable

from coverage import env
from coverage.types import TLineNo, TSourceTokenLines

TokenInfos = Iterable[tokenize.TokenInfo]


def _phys_tokens(toks: TokenInfos) -> TokenInfos:
    """Return all physical tokens, even line continuations.

    tokenize.generate_tokens() doesn't return a token for the backslash that
    continues lines.  This wrapper provides those tokens so that we can
    re-create a faithful representation of the original source.

    Returns the same values as generate_tokens()

    """
    last_line: str | None = None
    last_lineno = -1
    last_ttext = ""
    for ttype, ttext, (slineno, scol), (elineno, ecol), ltext in toks:
        if last_lineno != elineno:
            if last_line and last_line.endswith("\\\n"):
                # The previous physical line ended with a backslash, but the
                # tokenizer gives us no token for it, so inject one -- unless
                # the backslash is already carried by the current token.
                inject_backslash = True
                if last_ttext.endswith("\\"):
                    inject_backslash = False
                elif ttype == token.STRING:
                    # A multi-line string whose first physical line ends with
                    # a backslash already contains it (reconstructed check).
                    if "\n" in ttext and ttext.split("\n", 1)[0].rstrip().endswith("\\"):
                        inject_backslash = False
                elif env.PYBEHAVIOR.fstring_syntax and ttype == token.FSTRING_MIDDLE:
                    inject_backslash = False
                if inject_backslash:
                    # Figure out what column the backslash is in, and yield a
                    # synthetic token (with a fake token type) for it.
                    ccol = len(last_line.split("\n")[-2]) - 1
                    yield tokenize.TokenInfo(
                        99999, "\\\n",
                        (slineno, ccol), (slineno, ccol + 2),
                        last_line,
                    )
            last_line = ltext
        if ttype not in (tokenize.NEWLINE, tokenize.NL):
            last_ttext = ttext
        yield tokenize.TokenInfo(ttype, ttext, (slineno, scol), (elineno, ecol), ltext)
        last_lineno = elineno


def find_soft_key_lines(source: str) -> set[TLineNo]:
    """Helper for finding lines with soft keywords, like match/case lines."""
    soft_key_lines: set[TLineNo] = set()
    for node in ast.walk(ast.parse(source)):
        if isinstance(node, ast.Match):
            soft_key_lines.add(node.lineno)
            for case in node.cases:
                soft_key_lines.add(case.pattern.lineno)
        elif sys.version_info >= (3, 12) and isinstance(node, ast.TypeAlias):
            soft_key_lines.add(node.lineno)
    return soft_key_lines


def source_token_lines(source: str) -> TSourceTokenLines:
    """Generate a series of lines, one for each line in `source`.

    Each line is a list of pairs, each pair is a token::

        [('key', 'def'), ('ws', ' '), ('nam', 'hello'), ('op', '('), ... ]

    Each pair has a token class, and the token text.

    If you concatenate all the token texts, and then join them with newlines,
    you should have your original `source` back, with two differences:
    trailing white space is not preserved, and a final line with no newline
    is indistinguishable from a final line with a newline.

    """
    ws_tokens = {token.INDENT, token.DEDENT, token.NEWLINE, tokenize.NL}
    line: list[tuple[str, str]] = []
    col = 0

    source = source.expandtabs(8).replace("\r\n", "\n")
    tokgen = generate_tokens(source)
    soft_key_lines = find_soft_key_lines(source)

    for ttype, ttext, (sline, scol), (_, ecol), _ in _phys_tokens(tokgen):
        mark_start = True
        for part in re.split("(\n)", ttext):
            if part == "\n":
                yield line
                line = []
                col = 0
                mark_end = False
            elif part == "":
                mark_end = False
            elif ttype in ws_tokens:
                mark_end = False
            else:
                if env.PYBEHAVIOR.fstring_syntax and ttype == token.FSTRING_MIDDLE:
                    # f-string middle tokens report braces singly; double them
                    # back so the text round-trips.
                    part = part.replace("{", "{{").replace("}", "}}")
                    ecol = scol + len(part)
                if mark_start and scol > col:
                    line.append(("ws", " " * (scol - col)))
                    mark_start = False
                tok_class = tokenize.tok_name.get(ttype, "xx").lower()[:3]
                if ttype == token.NAME:
                    if keyword.iskeyword(ttext):
                        # Hard keywords are always keywords.
                        tok_class = "key"
                    elif keyword.issoftkeyword(ttext):
                        # Soft keywords only count when they start their line
                        # and the AST says the line really uses them.
                        if len(line) == 0:
                            is_start_of_line = True
                        elif len(line) == 1 and line[0][0] == "ws":
                            is_start_of_line = True
                        else:
                            is_start_of_line = False
                        if is_start_of_line and sline in soft_key_lines:
                            tok_class = "key"
                line.append((tok_class, part))
                mark_end = True
            scol = 0
        if mark_end:
            col = ecol
    if line:
        yield line


def generate_tokens(text: str) -> TokenInfos:
    """A helper around `tokenize.generate_tokens`.

    Originally this was used to cache the results, but it didn't seem to make
    reporting go faster, and caused issues with using too much memory.

    """
    readline = io.StringIO(text).readline
    return tokenize.generate_tokens(readline)


def source_encoding(source: bytes) -> str:
    """Determine the encoding for `source`, according to PEP 263.

    `source` is a byte string: the text of the program.

    Returns a string, the name of the encoding.

    """
    readline = iter(source.splitlines(True)).__next__
    return tokenize.detect_encoding(readline)[0]