from __future__ import annotations

import asyncio
import json
from json import JSONDecodeError
from time import sleep
from typing import (
    TYPE_CHECKING,
    Any,
    Callable,
    Dict,
    List,
    Optional,
    Sequence,
    Tuple,
    Type,
    Union,
)

from langchain_core.agents import AgentAction, AgentFinish
from langchain_core.callbacks import CallbackManager
from langchain_core.load import dumpd
from langchain_core.runnables import (
    RunnableConfig,
    RunnableSerializable,
    ensure_config,
)
from langchain_core.tools import BaseTool
from langchain_core.utils.function_calling import convert_to_openai_tool
from pydantic import BaseModel, Field, model_validator
from typing_extensions import Self

if TYPE_CHECKING:
    import openai
    from openai.types.beta.threads import ThreadMessage
    from openai.types.beta.threads.required_action_function_tool_call import (
        RequiredActionFunctionToolCall,
    )


class OpenAIAssistantFinish(AgentFinish):
    """AgentFinish with run and thread metadata.

    Parameters:
        run_id: Run id.
        thread_id: Thread id.
    strrun_id	thread_idreturnboolc                 C     dS z]Check if the class is serializable by LangChain.

        Returns:
            False
        F clsr&   r&   ^/var/www/html/zoom/venv/lib/python3.10/site-packages/langchain/agents/openai_assistant/base.pyis_lc_serializable0      z(OpenAIAssistantFinish.is_lc_serializableNr"   r#   __name__
__module____qualname____doc____annotations__classmethodr*   r&   r&   r&   r)   r   %   s   
 r   c                   @  s8   e Zd ZU dZded< ded< ded< eddd	Zd
S )OpenAIAssistantActionzAgentAction with info needed to submit custom tool output to existing run.

    Parameters:
        tool_call_id: Tool call id.
        run_id: Run id.
        thread_id: Thread id
    r   tool_call_idr    r!   r"   r#   c                 C  r$   r%   r&   r'   r&   r&   r)   r*   G   r+   z(OpenAIAssistantAction.is_lc_serializableNr,   r-   r&   r&   r&   r)   r4   :   s   
 r4   r"   openai.OpenAIc               
   C  V   z	dd l } |  W S  ty } ztd|d }~w ty* } ztd|d }~ww Nr   zBUnable to import openai, please install with `pip install openai`.zuPlease make sure you are using a v1.1-compatible version of openai. You can install with `pip install "openai>=1.1"`.)openaiOpenAIImportErrorAttributeErrorr9   er&   r&   r)   _get_openai_clientQ   $   
r?   openai.AsyncOpenAIc               
   C  r7   r8   )r9   AsyncOpenAIr;   r<   r=   r&   r&   r)   _get_openai_async_clienta   r@   rC   tool:Union[Dict[str, Any], Type[BaseModel], Callable, BaseTool]r#   c                 C  s"   d}t | tod| v o| d |v S )z<Determine if tool corresponds to OpenAI Assistants built-in.)code_interpreter	retrievaltype)
isinstancedict)rD   assistants_builtin_toolsr&   r&   r)   _is_assistants_builtin_toolq   s   

rL   Dict[str, Any]c                 C  s   t | r| S t| S )zConvert a raw function/class to an OpenAI tool.

    Note that OpenAI assistants support several built-in tools,
    such as "code_interpreter" and "retrieval."
    """
    if _is_assistants_builtin_tool(tool):
        return tool  # type: ignore
    return convert_to_openai_tool(tool)


OutputType = Union[
    List[OpenAIAssistantAction],
    OpenAIAssistantFinish,
    List["ThreadMessage"],
    List["RequiredActionFunctionToolCall"],
]


class OpenAIAssistantRunnable(RunnableSerializable[Dict, OutputType]):
    """Run an OpenAI Assistant.

    Example using OpenAI tools:
        .. code-block:: python

            from langchain_experimental.openai_assistant import OpenAIAssistantRunnable

            interpreter_assistant = OpenAIAssistantRunnable.create_assistant(
                name="langchain assistant",
                instructions="You are a personal math tutor. Write and run code to answer math questions.",
                tools=[{"type": "code_interpreter"}],
                model="gpt-4-1106-preview"
            )
            output = interpreter_assistant.invoke({"content": "What's 10 - 4 raised to the 2.7"})
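
            # `output` is a list of new thread messages; a sketch of reading the
            # first reply's text (attribute layout assumed from the OpenAI SDK).
            print(output[0].content[0].text.value)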

    Example using custom tools and AgentExecutor:
        .. code-block:: python

            from langchain_experimental.openai_assistant import OpenAIAssistantRunnable
            from langchain.agents import AgentExecutor
            from langchain.tools import E2BDataAnalysisTool


            tools = [E2BDataAnalysisTool(api_key="...")]
            agent = OpenAIAssistantRunnable.create_assistant(
                name="langchain assistant e2b tool",
                instructions="You are a personal math tutor. Write and run code to answer math questions.",
                tools=tools,
                model="gpt-4-1106-preview",
                as_agent=True
            )

            agent_executor = AgentExecutor(agent=agent, tools=tools)
            agent_executor.invoke({"content": "What's 10 - 4 raised to the 2.7"})


    Example using custom tools and custom execution:
        .. code-block:: python

            from langchain_experimental.openai_assistant import OpenAIAssistantRunnable
            from langchain.agents import AgentExecutor
            from langchain_core.agents import AgentFinish
            from langchain.tools import E2BDataAnalysisTool


            tools = [E2BDataAnalysisTool(api_key="...")]
            agent = OpenAIAssistantRunnable.create_assistant(
                name="langchain assistant e2b tool",
                instructions="You are a personal math tutor. Write and run code to answer math questions.",
                tools=tools,
                model="gpt-4-1106-preview",
                as_agent=True
            )

            def execute_agent(agent, tools, input):
                tool_map = {tool.name: tool for tool in tools}
                response = agent.invoke(input)
                while not isinstance(response, AgentFinish):
                    tool_outputs = []
                    for action in response:
                        tool_output = tool_map[action.tool].invoke(action.tool_input)
                        tool_outputs.append({"output": tool_output, "tool_call_id": action.tool_call_id})
                    response = agent.invoke(
                        {
                            "tool_outputs": tool_outputs,
                            "run_id": action.run_id,
                            "thread_id": action.thread_id
                        }
                    )

                return response

            response = execute_agent(agent, tools, {"content": "What's 10 - 4 raised to the 2.7"})
            next_response = execute_agent(agent, tools, {"content": "now add 17.241", "thread_id": response.thread_id})

    """

    client: Any = Field(default_factory=_get_openai_client)
    """OpenAI or AzureOpenAI client."""
    async_client: Any = None
    """OpenAI or AzureOpenAI async client."""
    assistant_id: str
    """OpenAI assistant id."""
    check_every_ms: float = 1_000.0
    """Frequency with which to check run progress in ms."""
    as_agent: bool = False
    """Use as a LangChain agent, compatible with the AgentExecutor."""

    @model_validator(mode="after")
    def validate_async_client(self) -> Self:
        if self.async_client is None:
            import openai

            api_key = self.client.api_key
            self.async_client = openai.AsyncOpenAI(api_key=api_key)
        return self

    @classmethod
    def create_assistant(
        cls,
        name: str,
        instructions: str,
        tools: Sequence[Union[BaseTool, dict]],
        model: str,
        *,
        client: Optional[Union[openai.OpenAI, openai.AzureOpenAI]] = None,
        **kwargs: Any,
    ) -> OpenAIAssistantRunnable:
        """Create an OpenAI Assistant and instantiate the Runnable.

        Args:
            name: Assistant name.
            instructions: Assistant instructions.
            tools: Assistant tools. Can be passed in OpenAI format or as BaseTools.
            model: Assistant model to use.
            client: OpenAI or AzureOpenAI client.
                Will create a default OpenAI client if not specified.
            kwargs: Additional arguments.

        Returns:
            OpenAIAssistantRunnable configured to run using the created assistant.
        """
        client = client or _get_openai_client()
        assistant = client.beta.assistants.create(
            name=name,
            instructions=instructions,
            tools=[_get_assistants_tool(tool) for tool in tools],
            model=model,
        )
        return cls(assistant_id=assistant.id, client=client, **kwargs)

    def invoke(
        self, input: dict, config: Optional[RunnableConfig] = None
    ) -> OutputType:
        """Invoke assistant.

        Args:
            input: Runnable input dict that can have:
                content: User message when starting a new run.
                thread_id: Existing thread to use.
                run_id: Existing run to use. Should only be supplied when providing
                    the tool output for a required action after an initial invocation.
                message_metadata: Metadata to associate with new message.
                thread_metadata: Metadata to associate with new thread. Only relevant
                    when a new thread is being created.
                instructions: Additional run instructions.
                model: Override Assistant model for this run.
                tools: Override Assistant tools for this run.
                run_metadata: Metadata to associate with new run.
            config: Runnable config. Defaults to None.

        Return:
            If self.as_agent, will return
                Union[List[OpenAIAssistantAction], OpenAIAssistantFinish].
                Otherwise, will return OpenAI types
                Union[List[ThreadMessage], List[RequiredActionFunctionToolCall]].
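
        Example:
            A minimal sketch of calling the runnable directly (outside an
            ``AgentExecutor``); the assistant id is a placeholder for one
            created earlier, e.g. via ``create_assistant``.

            .. code-block:: python

                agent = OpenAIAssistantRunnable(assistant_id="asst_...", as_agent=False)

                # Start a new thread with a single user message.
                messages = agent.invoke({"content": "What's 10 - 4 raised to the 2.7"})

                # Continue the same thread with a follow-up message.
                followup = agent.invoke(
                    {"content": "now add 17.241", "thread_id": messages[0].thread_id}
                )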
        """
        config = ensure_config(config)
        callback_manager = CallbackManager.configure(
            inheritable_callbacks=config.get("callbacks"),
            inheritable_tags=config.get("tags"),
            inheritable_metadata=config.get("metadata"),
        )
        run_manager = callback_manager.on_chain_start(
            dumpd(self), input, name=config.get("run_name") or self.get_name()
        )
        try:
            # Being run within an AgentExecutor and there are tool outputs to submit.
            if self.as_agent and input.get("intermediate_steps"):
                tool_outputs = self._parse_intermediate_steps(
                    input["intermediate_steps"]
                )
                run = self.client.beta.threads.runs.submit_tool_outputs(**tool_outputs)
            # Starting a new thread and a new run.
            elif "thread_id" not in input:
                thread = {
                    "messages": [
                        {
                            "role": "user",
                            "content": input["content"],
                            "metadata": input.get("message_metadata"),
                        }
                    ],
                    "metadata": input.get("thread_metadata"),
                }
                run = self._create_thread_and_run(input, thread)
            # Starting a new run in an existing thread.
            elif "run_id" not in input:
                _ = self.client.beta.threads.messages.create(
                    input["thread_id"],
                    content=input["content"],
                    role="user",
                    metadata=input.get("message_metadata"),
                )
                run = self._create_run(input)
            # Submitting tool outputs to an existing run, outside the AgentExecutor
            # framework.
            else:
                run = self.client.beta.threads.runs.submit_tool_outputs(**input)
            run = self._wait_for_run(run.id, run.thread_id)
        except BaseException as e:
            run_manager.on_chain_error(e)
            raise e
        try:
            response = self._get_response(run)
        except BaseException as e:
            run_manager.on_chain_error(e, metadata=run.dict())
            raise e
        else:
            run_manager.on_chain_end(response)
            return response

    @classmethod
    async def acreate_assistant(
        cls,
        name: str,
        instructions: str,
        tools: Sequence[Union[BaseTool, dict]],
        model: str,
        *,
        async_client: Optional[
            Union[openai.AsyncOpenAI, openai.AsyncAzureOpenAI]
        ] = None,
        **kwargs: Any,
    ) -> OpenAIAssistantRunnable:
        """Async create an AsyncOpenAI Assistant and instantiate the Runnable.

        Args:
            name: Assistant name.
            instructions: Assistant instructions.
            tools: Assistant tools. Can be passed in OpenAI format or as BaseTools.
            model: Assistant model to use.
            async_client: AsyncOpenAI client.
                Will create default async_client if not specified.

        Returns:
            OpenAIAssistantRunnable configured to run using the created assistant.
        """
        async_client = async_client or _get_openai_async_client()
        openai_tools = [_get_assistants_tool(tool) for tool in tools]
        assistant = await async_client.beta.assistants.create(
            name=name,
            instructions=instructions,
            tools=openai_tools,
            model=model,
        )
        return cls(assistant_id=assistant.id, async_client=async_client, **kwargs)

    async def ainvoke(
        self, input: dict, config: Optional[RunnableConfig] = None, **kwargs: Any
    ) -> OutputType:
        """Async invoke assistant.

        Args:
            input: Runnable input dict that can have:
                content: User message when starting a new run.
                thread_id: Existing thread to use.
                run_id: Existing run to use. Should only be supplied when providing
                    the tool output for a required action after an initial invocation.
                message_metadata: Metadata to associate with a new message.
                thread_metadata: Metadata to associate with new thread. Only relevant
                    when a new thread is created.
                instructions: Additional run instructions.
                model: Override Assistant model for this run.
                tools: Override Assistant tools for this run.
                run_metadata: Metadata to associate with new run.
            config: Runnable config. Defaults to None.
            kwargs: Additional arguments.

        Return:
            If self.as_agent, will return
                Union[List[OpenAIAssistantAction], OpenAIAssistantFinish].
                Otherwise, will return OpenAI types
                Union[List[ThreadMessage], List[RequiredActionFunctionToolCall]].
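
        Example:
            A minimal sketch of the async path; the assistant id is a
            placeholder for an existing assistant.

            .. code-block:: python

                import asyncio

                agent = OpenAIAssistantRunnable(assistant_id="asst_...", as_agent=False)

                async def main() -> None:
                    messages = await agent.ainvoke(
                        {"content": "What's 10 - 4 raised to the 2.7"}
                    )
                    print(messages[0].content)

                asyncio.run(main())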
        """
        config = config or {}
        callback_manager = CallbackManager.configure(
            inheritable_callbacks=config.get("callbacks"),
            inheritable_tags=config.get("tags"),
            inheritable_metadata=config.get("metadata"),
        )
        run_manager = callback_manager.on_chain_start(
            dumpd(self), input, name=config.get("run_name") or self.get_name()
        )
        try:
            # Being run within an AgentExecutor and there are tool outputs to submit.
            if self.as_agent and input.get("intermediate_steps"):
                tool_outputs = await self._aparse_intermediate_steps(
                    input["intermediate_steps"]
                )
                run = await self.async_client.beta.threads.runs.submit_tool_outputs(
                    **tool_outputs
                )
            # Starting a new thread and a new run.
            elif "thread_id" not in input:
                thread = {
                    "messages": [
                        {
                            "role": "user",
                            "content": input["content"],
                            "metadata": input.get("message_metadata"),
                        }
                    ],
                    "metadata": input.get("thread_metadata"),
                }
                run = await self._acreate_thread_and_run(input, thread)
            # Starting a new run in an existing thread.
            elif "run_id" not in input:
                _ = await self.async_client.beta.threads.messages.create(
                    input["thread_id"],
                    content=input["content"],
                    role="user",
                    metadata=input.get("message_metadata"),
                )
                run = await self._acreate_run(input)
            # Submitting tool outputs to an existing run, outside the AgentExecutor
            # framework.
            else:
                run = await self.async_client.beta.threads.runs.submit_tool_outputs(
                    **input
                )
            run = await self._await_for_run(run.id, run.thread_id)
        except BaseException as e:
            run_manager.on_chain_error(e)
            raise e
        try:
            response = self._get_response(run)
        except BaseException as e:
            run_manager.on_chain_error(e, metadata=run.dict())
            raise e
        else:
            run_manager.on_chain_end(response)
            return response

    def _parse_intermediate_steps(
        self, intermediate_steps: List[Tuple[OpenAIAssistantAction, str]]
    ) -> dict:
        last_action, last_output = intermediate_steps[-1]
        run = self._wait_for_run(last_action.run_id, last_action.thread_id)
        required_tool_call_ids = set()
        if run.required_action:
            required_tool_call_ids = {
                tc.id for tc in run.required_action.submit_tool_outputs.tool_calls
            }
        tool_outputs = [
            {"output": str(output), "tool_call_id": action.tool_call_id}
            for action, output in intermediate_steps
            if action.tool_call_id in required_tool_call_ids
        ]
        submit_tool_outputs = {
            "tool_outputs": tool_outputs,
            "run_id": last_action.run_id,
            "thread_id": last_action.thread_id,
        }
        return submit_tool_outputs

    def _create_run(self, input: dict) -> Any:
        params = {
            k: v
            for k, v in input.items()
            if k in ("instructions", "model", "tools", "run_metadata")
        }
        return self.client.beta.threads.runs.create(
            input["thread_id"],
            assistant_id=self.assistant_id,
            **params,
        )

    def _create_thread_and_run(self, input: dict, thread: dict) -> Any:
        params = {
            k: v
            for k, v in input.items()
            if k in ("instructions", "model", "tools", "run_metadata")
        }
        run = self.client.beta.threads.create_and_run(
            assistant_id=self.assistant_id,
            thread=thread,
            **params,
        )
        return run

    def _get_response(self, run: Any) -> Any:
        if run.status == "completed":
            import openai

            major_version = int(openai.version.VERSION.split(".")[0])
            minor_version = int(openai.version.VERSION.split(".")[1])
            version_gte_1_14 = (major_version > 1) or (
                major_version == 1 and minor_version >= 14
            )

            messages = self.client.beta.threads.messages.list(
                run.thread_id, order="asc"
            )
            new_messages = [msg for msg in messages if msg.run_id == run.id]
            if not self.as_agent:
                return new_messages
            answer: Any = [
                msg_content for msg in new_messages for msg_content in msg.content
            ]
            if all(
                (
                    isinstance(content, openai.types.beta.threads.TextContentBlock)
                    if version_gte_1_14
                    else isinstance(
                        content, openai.types.beta.threads.MessageContentText
                    )
                )
                for content in answer
            ):
                answer = "\n".join(content.text.value for content in answer)
            return OpenAIAssistantFinish(
                return_values={
                    "output": answer,
                    "thread_id": run.thread_id,
                    "run_id": run.id,
                },
                log="",
                run_id=run.id,
                thread_id=run.thread_id,
            )
        elif run.status == "requires_action":
            if not self.as_agent:
                return run.required_action.submit_tool_outputs.tool_calls
            actions = []
            for tool_call in run.required_action.submit_tool_outputs.tool_calls:
                function = tool_call.function
                try:
                    args = json.loads(function.arguments, strict=False)
                except JSONDecodeError as e:
                    raise ValueError(
                        f"Received invalid JSON function arguments: "
                        f"{function.arguments} for function {function.name}"
                    ) from e
                if len(args) == 1 and "__arg1" in args:
                    args = args["__arg1"]
                actions.append(
                    OpenAIAssistantAction(
                        tool=function.name,
                        tool_input=args,
                        tool_call_id=tool_call.id,
                        log="",
                        run_id=run.id,
                        thread_id=run.thread_id,
                    )
                )
            return actions
        else:
            run_info = json.dumps(run.dict(), indent=2)
            raise ValueError(
                f"Unexpected run status: {run.status}. Full run info:\n\n{run_info}"
            )

    def _wait_for_run(self, run_id: str, thread_id: str) -> Any:
        in_progress = True
        while in_progress:
            run = self.client.beta.threads.runs.retrieve(run_id, thread_id=thread_id)
            in_progress = run.status in ("in_progress", "queued")
            if in_progress:
                sleep(self.check_every_ms / 1000)
        return run

    async def _aparse_intermediate_steps(
        self, intermediate_steps: List[Tuple[OpenAIAssistantAction, str]]
    ) -> dict:
        last_action, last_output = intermediate_steps[-1]
        run = await self._await_for_run(last_action.run_id, last_action.thread_id)
        required_tool_call_ids = set()
        if run.required_action:
            required_tool_call_ids = {
                tc.id for tc in run.required_action.submit_tool_outputs.tool_calls
            }
        tool_outputs = [
            {"output": str(output), "tool_call_id": action.tool_call_id}
            for action, output in intermediate_steps
            if action.tool_call_id in required_tool_call_ids
        ]
        submit_tool_outputs = {
            "tool_outputs": tool_outputs,
            "run_id": last_action.run_id,
            "thread_id": last_action.thread_id,
        }
        return submit_tool_outputs

    async def _acreate_run(self, input: dict) -> Any:
        params = {
            k: v
            for k, v in input.items()
            if k in ("instructions", "model", "tools", "run_metadata")
        }
        return await self.async_client.beta.threads.runs.create(
            input["thread_id"],
            assistant_id=self.assistant_id,
            **params,
        )

    async def _acreate_thread_and_run(self, input: dict, thread: dict) -> Any:
        params = {
            k: v
            for k, v in input.items()
            if k in ("instructions", "model", "tools", "run_metadata")
        }
        run = await self.async_client.beta.threads.create_and_run(
            assistant_id=self.assistant_id,
            thread=thread,
            **params,
        )
        return run

    async def _aget_response(self, run: Any) -> Any:
        if run.status == "completed":
            import openai

            major_version = int(openai.version.VERSION.split(".")[0])
            minor_version = int(openai.version.VERSION.split(".")[1])
            version_gte_1_14 = (major_version > 1) or (
                major_version == 1 and minor_version >= 14
            )

            messages = await self.async_client.beta.threads.messages.list(
                run.thread_id, order="asc"
            )
            new_messages = [msg for msg in messages if msg.run_id == run.id]
            if not self.as_agent:
                return new_messages
            answer: Any = [
                msg_content for msg in new_messages for msg_content in msg.content
            ]
            if all(
                (
                    isinstance(content, openai.types.beta.threads.TextContentBlock)
                    if version_gte_1_14
                    else isinstance(
                        content, openai.types.beta.threads.MessageContentText
                    )
                )
                for content in answer
            ):
                answer = "\n".join(content.text.value for content in answer)
            return OpenAIAssistantFinish(
                return_values={
                    "output": answer,
                    "thread_id": run.thread_id,
                    "run_id": run.id,
                },
                log="",
                run_id=run.id,
                thread_id=run.thread_id,
            )
        elif run.status == "requires_action":
            if not self.as_agent:
                return run.required_action.submit_tool_outputs.tool_calls
            actions = []
            for tool_call in run.required_action.submit_tool_outputs.tool_calls:
                function = tool_call.function
                try:
                    args = json.loads(function.arguments, strict=False)
                except JSONDecodeError as e:
                    raise ValueError(
                        f"Received invalid JSON function arguments: "
                        f"{function.arguments} for function {function.name}"
                    ) from e
                if len(args) == 1 and "__arg1" in args:
                    args = args["__arg1"]
                actions.append(
                    OpenAIAssistantAction(
                        tool=function.name,
                        tool_input=args,
                        tool_call_id=tool_call.id,
                        log="",
                        run_id=run.id,
                        thread_id=run.thread_id,
                    )
                )
            return actions
        else:
            run_info = json.dumps(run.dict(), indent=2)
            raise ValueError(
                f"Unexpected run status: {run.status}. Full run info:\n\n{run_info}"
            )

    async def _await_for_run(self, run_id: str, thread_id: str) -> Any:
        in_progress = True
        while in_progress:
            run = await self.async_client.beta.threads.runs.retrieve(
                run_id, thread_id=thread_id
            )
            in_progress = run.status in ("in_progress", "queued")
            if in_progress:
                await asyncio.sleep(self.check_every_ms / 1000)
        return run