
"""
Support for Llama API's `https://api.llama.com/compat/v1` endpoint.

Calls done in OpenAI/openai.py as Llama API is openai-compatible.

Docs: https://llama.developer.meta.com/docs/features/compatibility/
    NignorezPydantic serializer warnings)message)OpenAIGPTConfigc            
       H     e Zd Zdedef fdZdededededef
 fdZ xZ	S )LlamaAPIConfigmodelreturnc                 &    t         |   |      }|S )z
        Llama API has limited support for OpenAI parameters

        function_call, tools, and tool_choice are working
        response_format: only json_schema is working
        )superget_supported_openai_params)selfr   optional_params	__class__s      g/var/www/Befach/backend/env/lib/python3.12/site-packages/litellm/llms/meta_llama/chat/transformation.pyr   z*LlamaAPIConfig.get_supported_openai_params   s      '=eD    non_default_paramsr   drop_paramsc                     t         |   ||||      }d|v r(|d   j                  d      dk7  r|j                  d       |S )Nresponse_formattypejson_schema)r   map_openai_paramsgetpop)r   r   r   r   r   mapped_openai_paramsr   s         r   r   z LlamaAPIConfig.map_openai_params   sW      %w8 
 !55$%67;;FC}T $$%67##r   )
__name__
__module____qualname__strlistr   dictboolr   __classcell__)r   s   @r   r   r      sP    	 	 	$ $ $ 	$
 $ 
$ $r   r   )__doc__warningsfilterwarnings+litellm.llms.openai.chat.gpt_transformationr   r    r   r   <module>r)      s3       *H I G$_ $r   