"""Use a single chain to route an input to one of multiple retrieval qa chains."""

from __future__ import annotations

from typing import Any, Dict, List, Mapping, Optional

from langchain_core.language_models import BaseLanguageModel
from langchain_core.prompts import PromptTemplate
from langchain_core.retrievers import BaseRetriever

from langchain.chains import ConversationChain
from langchain.chains.base import Chain
from langchain.chains.conversation.prompt import DEFAULT_TEMPLATE
from langchain.chains.retrieval_qa.base import BaseRetrievalQA, RetrievalQA
from langchain.chains.router.base import MultiRouteChain
from langchain.chains.router.llm_router import LLMRouterChain, RouterOutputParser
from langchain.chains.router.multi_retrieval_prompt import (
    MULTI_RETRIEVAL_ROUTER_TEMPLATE,
)


class MultiRetrievalQAChain(MultiRouteChain):
    """A multi-route chain that uses an LLM router chain to choose amongst retrieval
    qa chains."""

    router_chain: LLMRouterChain
    """Chain for deciding a destination chain and the input to it."""
    destination_chains: Mapping[str, BaseRetrievalQA]
    """Map of name to candidate retrieval QA chains that inputs can be routed to."""
    default_chain: Chain
    """Default chain to use when the router doesn't map the input to a destination."""

    @property
    def output_keys(self) -> List[str]:
        return ["result"]

    @classmethod
    def from_retrievers(
        cls,
        llm: BaseLanguageModel,
        retriever_infos: List[Dict[str, Any]],
        default_retriever: Optional[BaseRetriever] = None,
        default_prompt: Optional[PromptTemplate] = None,
        default_chain: Optional[Chain] = None,
        *,
        default_chain_llm: Optional[BaseLanguageModel] = None,
        **kwargs: Any,
    ) -> MultiRetrievalQAChain:
        if default_prompt and not default_retriever:
            raise ValueError(
                "`default_retriever` must be specified if `default_prompt` is "
                "provided. Received only `default_prompt`."
            )
        # Describe each destination so the router prompt can choose among them.
        destinations = [f"{r['name']}: {r['description']}" for r in retriever_infos]
        destinations_str = "\n".join(destinations)
        router_template = MULTI_RETRIEVAL_ROUTER_TEMPLATE.format(
            destinations=destinations_str
        )
        router_prompt = PromptTemplate(
            template=router_template,
            input_variables=["input"],
            output_parser=RouterOutputParser(next_inputs_inner_key="query"),
        )
        router_chain = LLMRouterChain.from_llm(llm, router_prompt)
        # Build one RetrievalQA chain per retriever info entry.
        destination_chains = {}
        for r_info in retriever_infos:
            prompt = r_info.get("prompt")
            retriever = r_info["retriever"]
            chain = RetrievalQA.from_llm(llm, prompt=prompt, retriever=retriever)
            name = r_info["name"]
            destination_chains[name] = chain
        # Resolve the fallback chain used when the router picks no destination.
        if default_chain:
            _default_chain = default_chain
        elif default_retriever:
            _default_chain = RetrievalQA.from_llm(
                llm, prompt=default_prompt, retriever=default_retriever
            )
        else:
            prompt_template = DEFAULT_TEMPLATE.replace("input", "query")
            prompt = PromptTemplate(
                template=prompt_template, input_variables=["history", "query"]
            )
            if default_chain_llm is None:
                raise NotImplementedError(
                    "conversation_llm must be provided if default_chain is not "
                    "specified. This API has been changed to avoid instantiating "
                    "default LLMs on behalf of users. "
                    "You can provide a conversation LLM like so:\n"
                    "from langchain_openai import ChatOpenAI\n"
                    "llm = ChatOpenAI()"
                )
            _default_chain = ConversationChain(
                llm=default_chain_llm,
                prompt=prompt,
                input_key="query",
                output_key="result",
            )
        return cls(
            router_chain=router_chain,
            destination_chains=destination_chains,
            default_chain=_default_chain,
            **kwargs,
        )
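

# Illustrative usage sketch (not part of the original module): shows how
# ``MultiRetrievalQAChain.from_retrievers`` might be wired up. The ChatOpenAI
# model, OpenAIEmbeddings, FAISS store, and sample text below are assumptions
# for demonstration only; running this requires the ``langchain-openai`` and
# ``faiss-cpu`` packages plus an OPENAI_API_KEY in the environment.
if __name__ == "__main__":
    from langchain_community.vectorstores import FAISS
    from langchain_openai import ChatOpenAI, OpenAIEmbeddings

    llm = ChatOpenAI()
    embeddings = OpenAIEmbeddings()

    # Each retriever info entry needs a name, a description (used by the
    # router prompt to pick a destination), and a retriever.
    docs_retriever = FAISS.from_texts(
        ["LangChain routes inputs to the best-matching chain."], embeddings
    ).as_retriever()
    retriever_infos = [
        {
            "name": "langchain docs",
            "description": "Good for questions about the LangChain framework",
            "retriever": docs_retriever,
        }
    ]

    # ``default_chain_llm`` backs the ConversationChain fallback used when the
    # router does not match any destination.
    chain = MultiRetrievalQAChain.from_retrievers(
        llm, retriever_infos, default_chain_llm=llm
    )
    print(chain.invoke({"input": "What does LangChain do?"})["result"])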