
"""
Support for o1 model family

https://platform.openai.com/docs/guides/reasoning

Translations handled by LiteLLM:
- modalities: image => drop param (if user opts in to dropping param)
- role: system ==> translate to role 'user'
- streaming => faked by LiteLLM
- Tools, response_format => drop param (if user opts in to dropping param)
- Logprobs => drop param (if user opts in to dropping param)
"""

from typing import List, Optional

import litellm
from litellm import verbose_logger
from litellm.litellm_core_utils.get_llm_provider_logic import get_llm_provider
from litellm.types.llms.openai import AllMessageValues, ChatCompletionUserMessage
from litellm.utils import (
    supports_function_calling,
    supports_response_schema,
    supports_system_messages,
)

from .gpt_transformation import OpenAIGPTConfig


class OpenAIO1Config(OpenAIGPTConfig):
    """
    Reference: https://platform.openai.com/docs/guides/reasoning
    """

    @classmethod
    def get_config(cls):
        return super().get_config()

    def should_fake_stream(
        self,
        model: Optional[str],
        stream: Optional[bool],
        custom_llm_provider: Optional[str] = None,
    ) -> bool:
        if stream is not True:
            return False
        if model is None:
            return True
        # o1-mini and o1-preview support native streaming; for other o1
        # models, streaming is faked by LiteLLM.
        supported_stream_models = ["o1-mini", "o1-preview"]
        for supported_model in supported_stream_models:
            if supported_model in model:
                return False
        return True

    def get_supported_openai_params(self, model: str) -> list:
        """
        Get the supported OpenAI params for the given model
        """
        all_openai_params = super().get_supported_openai_params(model=model)
        non_supported_params = [
            "logprobs",
            "top_p",
            "presence_penalty",
            "frequency_penalty",
            "top_logprobs",
        ]

        try:
            model, custom_llm_provider, api_base, api_key = get_llm_provider(
                model=model
            )
        except Exception:
            verbose_logger.debug(
                f"Unable to infer model provider for model={model}, defaulting to openai for o1 supported param check"
            )
            custom_llm_provider = "openai"

        _supports_function_calling = supports_function_calling(
            model, custom_llm_provider
        )
        _supports_response_schema = supports_response_schema(
            model, custom_llm_provider
        )

        if not _supports_function_calling:
            non_supported_params.append("tools")
            non_supported_params.append("tool_choice")
            non_supported_params.append("parallel_tool_calls")
            non_supported_params.append("function_call")
            non_supported_params.append("functions")

        if not _supports_response_schema:
            non_supported_params.append("response_format")

        return [
            param for param in all_openai_params if param not in non_supported_params
        ]

    def map_openai_params(
        self,
        non_default_params: dict,
        optional_params: dict,
        model: str,
        drop_params: bool,
    ):
        # o1 models take `max_completion_tokens` instead of `max_tokens`.
        if "max_tokens" in non_default_params:
            optional_params["max_completion_tokens"] = non_default_params.pop(
                "max_tokens"
            )
        if "temperature" in non_default_params:
            temperature_value: Optional[float] = non_default_params.pop("temperature")
            if temperature_value is not None:
                if temperature_value == 1:
                    optional_params["temperature"] = temperature_value
                else:
                    # Unsupported temperature value: drop it if the user opted
                    # in to dropping params, otherwise raise.
                    if litellm.drop_params is True or drop_params is True:
                        pass
                    else:
                        raise litellm.utils.UnsupportedParamsError(
                            message="O-1 doesn't support temperature={}. To drop unsupported openai params from the call, set `litellm.drop_params = True`".format(
                                temperature_value
                            ),
                            status_code=400,
                        )

        return super()._map_openai_params(
            non_default_params, optional_params, model, drop_params
        )

    def is_model_o1_reasoning_model(self, model: str) -> bool:
        if model in litellm.open_ai_chat_completion_models and "o1" in model:
            return True
        return False

    def _transform_messages(
        self, messages: List[AllMessageValues], model: str
    ) -> List[AllMessageValues]:
        """
        Handles limitations of O-1 model family.
        - modalities: image => drop param (if user opts in to dropping param)
        - role: system ==> translate to role 'user'
        """
        _supports_system_messages = supports_system_messages(model, "openai")
        for i, message in enumerate(messages):
            if message["role"] == "system" and not _supports_system_messages:
                new_message = ChatCompletionUserMessage(
                    content=message["content"], role="user"
                )
                messages[i] = new_message

        return messages
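

# ---------------------------------------------------------------------------
# Illustrative usage sketch (added for documentation purposes, not part of the
# original module). It exercises the methods reconstructed above; the expected
# results noted in the comments follow from that logic and may differ across
# litellm versions.
# ---------------------------------------------------------------------------
if __name__ == "__main__":
    config = OpenAIO1Config()

    # Streaming is faked only for o1 models without native streaming support.
    print(config.should_fake_stream(model="o1", stream=True))          # expected: True
    print(config.should_fake_stream(model="o1-preview", stream=True))  # expected: False

    # Params unsupported by the o1 family (e.g. logprobs, top_p) are filtered
    # out of the supported-param list.
    print(config.get_supported_openai_params("o1-preview"))

    # System messages are rewritten to user messages for models that do not
    # support them.
    messages: List[AllMessageValues] = [
        {"role": "system", "content": "You are a helpful assistant."},
        {"role": "user", "content": "Hello"},
    ]
    print(config._transform_messages(messages, model="o1-preview"))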