"""Better tokenizing for coverage.py."""

from __future__ import annotations

import ast
import io
import keyword
import re
import sys
import token
import tokenize

from collections.abc import Iterable

from coverage import env
from coverage.types import TLineNo, TSourceTokenLines

TokenInfos = Iterable[tokenize.TokenInfo]


def _phys_tokens(toks: TokenInfos) -> TokenInfos:
    """Return all physical tokens, even line continuations.

    tokenize.generate_tokens() doesn't return a token for the backslash that
    continues lines.  This wrapper provides those tokens so that we can
    re-create a faithful representation of the original source.

    Returns the same values as generate_tokens()

    """
    last_line: str | None = None
    last_lineno = -1
    last_ttext = ""
    for ttype, ttext, (slineno, scol), (elineno, ecol), ltext in toks:
        if last_lineno != elineno:
            if last_line and last_line.endswith("\\\n"):
                # The previous physical line ended with a backslash
                # continuation, which tokenize does not report.  Inject a fake
                # token for it, unless the backslash is already part of the
                # previous token (for example, inside a multi-line string).
                inject_backslash = True
                if last_ttext.endswith("\\"):
                    inject_backslash = False
                elif ttype == token.STRING:
                    if "\n" in ttext and ttext.split("\n", 1)[0][-1] == "\\":
                        # A multi-line string whose first line ends with a
                        # backslash: the backslash is already in the token.
                        inject_backslash = False
                elif sys.version_info >= (3, 12) and ttype == token.FSTRING_MIDDLE:
                    inject_backslash = False
                if inject_backslash:
                    # Figure out what column the backslash is in.
                    ccol = len(last_line.split("\n")[-2]) - 1
                    # Yield the token, with a fake token type.
                    yield tokenize.TokenInfo(
                        99999, "\\\n",
                        (slineno, ccol), (slineno, ccol + 2),
                        last_line,
                    )
            last_line = ltext
        if ttype not in (tokenize.NEWLINE, tokenize.NL):
            last_ttext = ttext
        yield tokenize.TokenInfo(ttype, ttext, (slineno, scol), (elineno, ecol), ltext)
        last_lineno = elineno
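

# Illustrative sketch (not part of the original module): for a two-line
# assignment that uses a backslash continuation, plain tokenize yields the
# NAME, OP, and NUMBER tokens but nothing standing in for the "\".  Wrapping
# the stream in _phys_tokens() injects a synthetic "\\\n" token (with a fake
# token type) so the physical lines can be reassembled exactly:
#
#     toks = generate_tokens("x = \\\n    1\n")
#     for tok in _phys_tokens(toks):
#         print(tok.type, repr(tok.string))
#
# The extra token appears between the "=" and the "1" with the text "\\\n".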

def find_soft_key_lines(source: str) -> set[TLineNo]:
    """Helper for finding lines with soft keywords, like match/case lines."""
    soft_key_lines: set[TLineNo] = set()

    for node in ast.walk(ast.parse(source)):
        if sys.version_info >= (3, 10) and isinstance(node, ast.Match):
            soft_key_lines.add(node.lineno)
            for case in node.cases:
                soft_key_lines.add(case.pattern.lineno)
        elif sys.version_info >= (3, 12) and isinstance(node, ast.TypeAlias):
            soft_key_lines.add(node.lineno)

    return soft_key_lines


def source_token_lines(source: str) -> TSourceTokenLines:
    """Generate a series of lines, one for each line in `source`.

    Each line is a list of pairs, each pair is a token::

        [('key', 'def'), ('ws', ' '), ('nam', 'hello'), ('op', '('), ... ]

    Each pair has a token class, and the token text.

    If you concatenate all the token texts, and then join them with newlines,
    you should have your original `source` back, with two differences:
    trailing white space is not preserved, and a final line with no newline
    is indistinguishable from a final line with a newline.

    """
    ws_tokens = {token.INDENT, token.DEDENT, token.NEWLINE, tokenize.NL}
    line: list[tuple[str, str]] = []
    col = 0

    source = source.expandtabs(8).replace("\r\n", "\n")
    tokgen = generate_tokens(source)

    if env.PYBEHAVIOR.soft_keywords:
        soft_key_lines = find_soft_key_lines(source)
    else:
        soft_key_lines = set()

    for ttype, ttext, (sline, scol), (_, ecol), _ in _phys_tokens(tokgen):
        mark_start = True
        for part in re.split("(\n)", ttext):
            if part == "\n":
                yield line
                line = []
                col = 0
                mark_end = False
            elif part == "":
                mark_end = False
            elif ttype in ws_tokens:
                mark_end = False
            else:
                if mark_start and scol > col:
                    line.append(("ws", " " * (scol - col)))
                    mark_start = False
                tok_class = tokenize.tok_name.get(ttype, "xx").lower()[:3]
                if ttype == token.NAME:
                    if keyword.iskeyword(ttext):
                        # Hard keywords are always keywords.
                        tok_class = "key"
                    elif env.PYBEHAVIOR.soft_keywords and keyword.issoftkeyword(ttext):
                        # Soft keywords only count when they start their line.
                        if len(line) == 0:
                            is_start_of_line = True
                        elif len(line) == 1 and line[0][0] == "ws":
                            is_start_of_line = True
                        else:
                            is_start_of_line = False
                        if is_start_of_line and sline in soft_key_lines:
                            tok_class = "key"
                line.append((tok_class, part))
                mark_end = True
            scol = 0
        if mark_end:
            col = ecol

    if line:
        yield line
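

# Illustrative sketch (not part of the original module): for the source
#
#     match command:
#         case "quit":
#             pass
#
# list(source_token_lines(source)) yields one list of (class, text) pairs per
# line, roughly:
#
#     [('key', 'match'), ('ws', ' '), ('nam', 'command'), ('op', ':')]
#     [('ws', '    '), ('key', 'case'), ('ws', ' '), ('str', '"quit"'), ('op', ':')]
#     [('ws', '        '), ('key', 'pass')]
#
# "match" and "case" are soft keywords, so they are classed as 'key' only
# because find_soft_key_lines() reported their lines; elsewhere the same
# names would be ordinary 'nam' tokens.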

def generate_tokens(text: str) -> TokenInfos:
    """A helper around `tokenize.generate_tokens`.

    Originally this was used to cache the results, but it didn't seem to make
    reporting go faster, and caused issues with using too much memory.

    """
    readline = io.StringIO(text).readline
    return tokenize.generate_tokens(readline)


def source_encoding(source: bytes) -> str:
    """Determine the encoding for `source`, according to PEP 263.

    `source` is a byte string: the text of the program.

    Returns a string, the name of the encoding.

    """
    readline = iter(source.splitlines(True)).__next__
    return tokenize.detect_encoding(readline)[0]