from __future__ import annotations

from typing import Any, Dict, Union

from langchain_core.retrievers import BaseRetriever, RetrieverOutput
from langchain_core.runnables import Runnable, RunnablePassthrough


def create_retrieval_chain(
    retriever: Union[BaseRetriever, Runnable[dict, RetrieverOutput]],
    combine_docs_chain: Runnable[Dict[str, Any], str],
) -> Runnable:
    """Create retrieval chain that retrieves documents and then passes them on.

    Args:
        retriever: Retriever-like object that returns list of documents. Should
            either be a subclass of BaseRetriever or a Runnable that returns
            a list of documents. If a subclass of BaseRetriever, then it
            is expected that an `input` key be passed in - this is what
            will be used to pass into the retriever. If this is NOT a
            subclass of BaseRetriever, then all the inputs will be passed
            into this runnable, meaning that runnable should take a dictionary
            as input (see the second code block under Example below).
        combine_docs_chain: Runnable that takes inputs and produces a string output.
            The inputs to this will be any original inputs to this chain, a new
            context key with the retrieved documents, and chat_history (if not present
            in the inputs) with a value of `[]` (to easily enable conversational
            retrieval).

    Returns:
        An LCEL Runnable. The Runnable return is a dictionary containing at the very
        least a `context` and `answer` key.

    Example:
        .. code-block:: python

            # pip install -U langchain langchain-community

            from langchain_community.chat_models import ChatOpenAI
            from langchain.chains.combine_documents import create_stuff_documents_chain
            from langchain.chains import create_retrieval_chain
            from langchain import hub

            retrieval_qa_chat_prompt = hub.pull("langchain-ai/retrieval-qa-chat")
            llm = ChatOpenAI()
            retriever = ...
            combine_docs_chain = create_stuff_documents_chain(
                llm, retrieval_qa_chat_prompt
            )
            retrieval_chain = create_retrieval_chain(retriever, combine_docs_chain)

            retrieval_chain.invoke({"input": "..."})
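
        A minimal sketch of the non-BaseRetriever path, assuming a hypothetical
        `lookup_docs` function wrapped in a `RunnableLambda` as the retriever;
        the full input dict is passed to that runnable, and the result carries
        `context` and `answer` keys:

        .. code-block:: python

            from langchain_core.documents import Document
            from langchain_core.runnables import RunnableLambda

            # Hypothetical lookup that receives the full input dict.
            def lookup_docs(inputs: dict) -> list:
                return [Document(page_content="...")]

            retrieval_chain = create_retrieval_chain(
                RunnableLambda(lookup_docs), combine_docs_chain
            )
            result = retrieval_chain.invoke({"input": "..."})
            result["context"]  # the retrieved documents
            result["answer"]   # the combine_docs_chain output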

    c                 S  s   | d S )Ninput )xr   r   R/var/www/html/zoom/venv/lib/python3.10/site-packages/langchain/chains/retrieval.py<lambda>=   s    z(create_retrieval_chain.<locals>.<lambda>retrieve_documents)run_name)context)answerretrieval_chain)
    """
    if not isinstance(retriever, BaseRetriever):
        retrieval_docs: Runnable[dict, RetrieverOutput] = retriever
    else:
        # A BaseRetriever expects just the query string stored under "input".
        retrieval_docs = (lambda x: x["input"]) | retriever

    retrieval_chain = (
        RunnablePassthrough.assign(
            context=retrieval_docs.with_config(run_name="retrieve_documents"),
        ).assign(answer=combine_docs_chain)
    ).with_config(run_name="retrieval_chain")

    return retrieval_chain