"""
Interface for Anthropic's messages API

Use this to call LLMs in Anthropic /messages Request/Response format

This is an __init__.py file that exposes the following interface:

- litellm.messages.acreate
- litellm.messages.create
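
Example (a minimal sketch; it assumes an Anthropic API key is configured for
litellm, e.g. via the ANTHROPIC_API_KEY environment variable, and that the
model name shown here is available to your account, so adjust it as needed):

    import asyncio
    import litellm

    # Synchronous call
    response = litellm.messages.create(
        model="claude-3-5-sonnet-20240620",
        max_tokens=100,
        messages=[{"role": "user", "content": "Hello, world"}],
    )

    # Async call
    async def main():
        return await litellm.messages.acreate(
            model="claude-3-5-sonnet-20240620",
            max_tokens=100,
            messages=[{"role": "user", "content": "Hello, world"}],
        )

    asyncio.run(main())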

    )AnyAsyncIterator	CoroutineDictListOptionalUnion)anthropic_messages)anthropic_messages_handler)AnthropicMessagesResponseN
max_tokensmessagesmodelmetadatastop_sequencesstreamsystemtemperaturethinkingtool_choicetoolstop_ktop_preturnc                 P   K   t        d| |||||||||	|
||d| d{   S 7 w)  
    Async wrapper for Anthropic's messages API

    Args:
        max_tokens (int): Maximum tokens to generate (required)
        messages (List[Dict]): List of message objects with role and content (required)
        model (str): Model name to use (required)
        metadata (Dict, optional): Request metadata
        stop_sequences (List[str], optional): Custom stop sequences
        stream (bool, optional): Whether to stream the response
        system (str, optional): System prompt
        temperature (float, optional): Sampling temperature (0.0 to 1.0)
        thinking (Dict, optional): Extended thinking configuration
        tool_choice (Dict, optional): Tool choice configuration
        tools (List[Dict], optional): List of tool definitions
        top_k (int, optional): Top K sampling parameter
        top_p (float, optional): Nucleus sampling parameter
        **kwargs: Additional arguments

    Returns:
        AnthropicMessagesResponse: Response from the API (an async iterator of
        chunks when stream=True)
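
    Example (illustrative sketch; assumes an Anthropic API key is configured
    and that the model name below is available, so adjust it as needed):

        import litellm

        async def ask():
            result = await litellm.messages.acreate(
                model="claude-3-5-sonnet-20240620",
                max_tokens=256,
                messages=[{"role": "user", "content": "Say hello"}],
                stream=True,
            )
            # With stream=True the call is expected to yield chunks; the exact
            # chunk shape depends on the underlying handler.
            async for chunk in result:
                print(chunk)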
    """
    return await _async_anthropic_messages(
        max_tokens=max_tokens,
        messages=messages,
        model=model,
        metadata=metadata,
        stop_sequences=stop_sequences,
        stream=stream,
        system=system,
        temperature=temperature,
        thinking=thinking,
        tool_choice=tool_choice,
        tools=tools,
        top_k=top_k,
        top_p=top_p,
        **kwargs,
    )


def create(
    max_tokens: int,
    messages: List[Dict],
    model: str,
    metadata: Optional[Dict] = None,
    stop_sequences: Optional[List[str]] = None,
    stream: Optional[bool] = False,
    system: Optional[str] = None,
    temperature: Optional[float] = None,
    thinking: Optional[Dict] = None,
    tool_choice: Optional[Dict] = None,
    tools: Optional[List[Dict]] = None,
    top_k: Optional[int] = None,
    top_p: Optional[float] = None,
    **kwargs
) -> Union[
    AnthropicMessagesResponse,
    AsyncIterator,
    Coroutine[Any, Any, Union[AnthropicMessagesResponse, AsyncIterator]],
]:
    """
    Synchronous wrapper for Anthropic's messages API.

    Accepts the same arguments as `acreate` above; see that docstring for
    parameter details.

    Returns:
        AnthropicMessagesResponse: Response from the API (an iterator of
        chunks when stream=True, or a coroutine on async execution paths)
    """
    return _sync_anthropic_messages(
        max_tokens=max_tokens,
        messages=messages,
        model=model,
        metadata=metadata,
        stop_sequences=stop_sequences,
        stream=stream,
        system=system,
        temperature=temperature,
        thinking=thinking,
        tool_choice=tool_choice,
        tools=tools,
        top_k=top_k,
        top_p=top_p,
        **kwargs,
    )