"""SQL Lexer"""

import re
from threading import Lock
from io import TextIOBase

from sqlparse import tokens, keywords
from sqlparse.utils import consume

# This code is based on the SqlLexer in pygments (http://pygments.org/),
# separated out for performance and to allow customizations.


class Lexer:
    """The Lexer supports configurable syntax.
    To add support for additional keywords, use the `add_keywords` method."""

    _default_instance = None
    _lock = Lock()

    @classmethod
    def get_default_instance(cls):
        """Returns the lexer instance used internally
        by the sqlparse core functions."""
        with cls._lock:
            if cls._default_instance is None:
                cls._default_instance = cls()
                cls._default_instance.default_initialization()
        return cls._default_instance

    def default_initialization(self):
        """Initialize the lexer with default dictionaries.
        Useful if you need to revert custom syntax settings."""
        self.clear()
        self.set_SQL_REGEX(keywords.SQL_REGEX)
        self.add_keywords(keywords.KEYWORDS_COMMON)
        self.add_keywords(keywords.KEYWORDS_ORACLE)
        self.add_keywords(keywords.KEYWORDS_MYSQL)
        self.add_keywords(keywords.KEYWORDS_PLPGSQL)
        self.add_keywords(keywords.KEYWORDS_HQL)
        self.add_keywords(keywords.KEYWORDS_MSACCESS)
        self.add_keywords(keywords.KEYWORDS_SNOWFLAKE)
        self.add_keywords(keywords.KEYWORDS_BIGQUERY)
        self.add_keywords(keywords.KEYWORDS)

    def clear(self):
        """Clear all syntax configurations.
        Useful if you want to load a reduced set of syntax configurations.
        After this call, regexps and keyword dictionaries need to be loaded
        to make the lexer functional again."""
        self._SQL_REGEX = []
        self._keywords = []

    def set_SQL_REGEX(self, SQL_REGEX):
        """Set the list of regex that will parse the SQL."""
        FLAGS = re.IGNORECASE | re.UNICODE
        self._SQL_REGEX = [
            (re.compile(rx, FLAGS).match, tt)
            for rx, tt in SQL_REGEX
        ]

    def add_keywords(self, keywords):
        """Add keyword dictionaries. Keywords are looked up in the same order
        that dictionaries were added."""
        self._keywords.append(keywords)

    def is_keyword(self, value):
        """Checks for a keyword.

        If the given value is in one of the KEYWORDS_* dictionaries,
        it's considered a keyword. Otherwise, ``tokens.Name`` is returned.
        """
        val = value.upper()
        for kwdict in self._keywords:
            if val in kwdict:
                return kwdict[val], value
        return tokens.Name, value

    def get_tokens(self, text, encoding=None):
        """Return an iterable of ``(tokentype, value)`` pairs generated
        from ``text``.

        ``text`` may be a string, a bytes object, or a file-like object.
        Bytes are decoded with ``encoding`` if given; otherwise UTF-8 is
        tried first, falling back to the unicode-escape codec.
        """
        if isinstance(text, TextIOBase):
            text = text.read()

        if isinstance(text, str):
            pass
        elif isinstance(text, bytes):
            if encoding:
                text = text.decode(encoding)
            else:
                try:
                    text = text.decode('utf-8')
                except UnicodeDecodeError:
                    text = text.decode('unicode-escape')
        else:
            raise TypeError("Expected text or file-like object, "
                            "got {!r}".format(type(text)))

        iterable = enumerate(text)
        for pos, char in iterable:
            for rexmatch, action in self._SQL_REGEX:
                m = rexmatch(text, pos)

                if not m:
                    continue
                elif isinstance(action, tokens._TokenType):
                    yield action, m.group()
                elif action is keywords.PROCESS_AS_KEYWORD:
                    yield self.is_keyword(m.group())

                # Advance past the characters this match consumed so the
                # outer loop resumes at the first unmatched position.
                consume(iterable, m.end() - pos - 1)
                break
            else:
                # No regex matched at this position: emit an error token
                # for the single character and move on.
                yield tokens.Error, char


def tokenize(sql, encoding=None):
    """Tokenize sql.

    Tokenize *sql* using the :class:`Lexer` and return a 2-tuple stream
    of ``(token type, value)`` items.
    """
    return Lexer.get_default_instance().get_tokens(sql, encoding)
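

if __name__ == '__main__':
    # A minimal usage sketch. ``tokenize`` is lazy: it streams
    # (tokentype, value) pairs, so a statement need not be
    # materialized as a full token list.
    for ttype, value in tokenize("SELECT a FROM t;"):
        print(ttype, repr(value))

    # Custom syntax: ``add_keywords`` registers one more lookup
    # dictionary on the shared default instance. ``FOO`` below is a
    # hypothetical keyword used only for illustration; keys must be
    # uppercase, because ``is_keyword`` upper-cases the matched value
    # before the lookup.
    Lexer.get_default_instance().add_keywords({'FOO': tokens.Keyword})
    print(list(tokenize("foo")))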