
from typing import List, Optional, Tuple

import httpx

import litellm
from litellm._logging import verbose_logger
from litellm.litellm_core_utils.prompt_templates.common_utils import (
    filter_value_from_dict,
    strip_name_from_messages,
)
from litellm.secret_managers.main import get_secret_str
from litellm.types.llms.openai import AllMessageValues
from litellm.types.utils import Choices, ModelResponse

from ...openai.chat.gpt_transformation import OpenAIGPTConfig

XAI_API_BASE = "https://api.x.ai/v1"


class XAIChatConfig(OpenAIGPTConfig):
    @property
    def custom_llm_provider(self) -> Optional[str]:
        return "xai"

    def _get_openai_compatible_provider_info(
        self, api_base: Optional[str], api_key: Optional[str]
    ) -> Tuple[Optional[str], Optional[str]]:
        # Explicit arguments win, then environment variables, then the default base URL.
        api_base = api_base or get_secret_str("XAI_API_BASE") or XAI_API_BASE
        dynamic_api_key = api_key or get_secret_str("XAI_API_KEY")
        return api_base, dynamic_api_key

    def get_supported_openai_params(self, model: str) -> list:
        base_openai_params = [
            "frequency_penalty",
            "logit_bias",
            "logprobs",
            "max_tokens",
            "n",
            "presence_penalty",
            "response_format",
            "seed",
            "stream",
            "stream_options",
            "temperature",
            "tool_choice",
            "tools",
            "top_logprobs",
            "top_p",
            "user",
            "web_search_options",
        ]
        if self._supports_stop_reason(model):
            base_openai_params.append("stop")
        try:
            if litellm.supports_reasoning(model, self.custom_llm_provider):
                base_openai_params.append("reasoning_effort")
        except Exception as e:
            verbose_logger.debug(f"Error checking if model supports reasoning: {e}")
        return base_openai_params

    def _supports_stop_reason(self, model: str) -> bool:
        # Reasoning models (grok-3-mini, grok-4) do not accept the 'stop' parameter.
        if "grok-3-mini" in model:
            return False
        elif "grok-4" in model:
            return False
        return True

    def map_openai_params(
        self,
        non_default_params: dict,
        optional_params: dict,
        model: str,
        drop_params: bool = False,
    ) -> dict:
        supported_openai_params = self.get_supported_openai_params(model=model)
        for param, value in non_default_params.items():
            if param == "max_tokens":
                # xAI expects the OpenAI-style 'max_completion_tokens' field.
                optional_params["max_completion_tokens"] = value
            elif param == "tools" and value is not None:
                tools = []
                for tool in value:
                    # xAI does not accept the 'strict' flag on tool definitions.
                    tool = filter_value_from_dict(tool, "strict")
                    if tool is not None:
                        tools.append(tool)
                if len(tools) > 0:
                    optional_params["tools"] = tools
            elif param in supported_openai_params and value is not None:
                optional_params[param] = value
        return optional_params

    def transform_request(
        self,
        model: str,
        messages: List[AllMessageValues],
        optional_params: dict,
        litellm_params: dict,
        headers: dict,
    ) -> dict:
        """
        Handle https://github.com/BerriAI/litellm/issues/9720

        Filter out 'name' from messages
        """
        messages = strip_name_from_messages(messages)
        return super().transform_request(
            model, messages, optional_params, litellm_params, headers
        )

    @staticmethod
    def _fix_choice_finish_reason_for_tool_calls(choice: Choices) -> None:
        """
        Helper to fix finish_reason for tool calls when XAI API returns empty string.

        XAI API returns empty string for finish_reason when using tools,
        so we need to set it to "tool_calls" when tool_calls are present.
        """
        if (
            choice.finish_reason == ""
            and choice.message.tool_calls
            and len(choice.message.tool_calls) > 0
        ):
            choice.finish_reason = "tool_calls"

    def transform_response(
        self,
        model: str,
        raw_response: httpx.Response,
        model_response: ModelResponse,
        logging_obj,
        request_data: dict,
        messages: List[AllMessageValues],
        optional_params: dict,
        litellm_params: dict,
        encoding,
        api_key: Optional[str] = None,
        json_mode: Optional[bool] = None,
    ) -> ModelResponse:
        """
        Transform the response from the XAI API.

        XAI API returns empty string for finish_reason when using tools,
        so we need to fix this after the standard OpenAI transformation.
        """
        response = super().transform_response(
            model=model,
            raw_response=raw_response,
            model_response=model_response,
            logging_obj=logging_obj,
            request_data=request_data,
            messages=messages,
            optional_params=optional_params,
            litellm_params=litellm_params,
            encoding=encoding,
            api_key=api_key,
            json_mode=json_mode,
        )

        # xAI can return finish_reason="" when tools are used; repair each choice.
        if response.choices:
            for choice in response.choices:
                if isinstance(choice, Choices):
                    self._fix_choice_finish_reason_for_tool_calls(choice)

        return response
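

# ---------------------------------------------------------------------------
# Illustrative usage sketch (not part of the library API): shows how this
# config resolves credentials and maps OpenAI-style parameters for an xAI
# request. The model name and tool definition below are examples only. Run as
# a module (python -m litellm.llms.xai.chat.transformation) so the relative
# import above resolves; set XAI_API_BASE / XAI_API_KEY in the environment to
# see non-default values come back.
# ---------------------------------------------------------------------------
if __name__ == "__main__":
    config = XAIChatConfig()

    # Explicit args win, then env vars, then the default https://api.x.ai/v1.
    api_base, api_key = config._get_openai_compatible_provider_info(
        api_base=None, api_key=None
    )
    print("resolved api_base:", api_base)

    # 'max_tokens' is renamed to 'max_completion_tokens'; the 'strict' flag is
    # stripped from the example tool definition.
    mapped = config.map_openai_params(
        non_default_params={
            "max_tokens": 256,
            "temperature": 0.2,
            "tools": [
                {
                    "type": "function",
                    "function": {"name": "get_weather", "parameters": {}},
                    "strict": True,
                }
            ],
        },
        optional_params={},
        model="grok-4",  # example model name
        drop_params=False,
    )
    print("mapped params:", mapped)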