    gh@                    .   d Z ddlmZ ddlmZmZmZ ddlmZ ddl	m
Z
mZ ddlmZ ddlmZ ddlmZ dd	lmZ dd
lmZ ddlmZ ddlmZ ddlmZ ddlmZ  eddd           G d de                      Z G d deee e f                            Z!dS )z+Base classes for LLM-powered router chains.    )annotations)AnyOptionalcast)
deprecated)AsyncCallbackManagerForChainRunCallbackManagerForChainRun)OutputParserException)BaseLanguageModel)BaseOutputParser)BasePromptTemplate)parse_and_check_json_markdown)model_validator)SelfLLMChain)RouterChainz0.2.12z1.0zUse RunnableLambda to select from multiple prompt templates. See example in API reference: https://api.python.langchain.com/en/latest/chains/langchain.chains.router.llm_router.LLMRouterChain.html)sinceremovalmessagec                       e Zd ZU dZded<   	  ed          dd            Zedd
            Zd fdZ		 d d!dZ
	 d d"dZed#d            Z xZS )$LLMRouterChaina
    """A router chain that uses an LLM chain to perform routing.

    This class is deprecated. See below for a replacement, which offers several
    benefits, including streaming and batch support.

    Below is an example implementation:

        .. code-block:: python

            from operator import itemgetter
            from typing import Literal
            from typing_extensions import TypedDict

            from langchain_core.output_parsers import StrOutputParser
            from langchain_core.prompts import ChatPromptTemplate
            from langchain_core.runnables import RunnableLambda, RunnablePassthrough
            from langchain_openai import ChatOpenAI

            llm = ChatOpenAI(model="gpt-4o-mini")

            prompt_1 = ChatPromptTemplate.from_messages(
                [
                    ("system", "You are an expert on animals."),
                    ("human", "{query}"),
                ]
            )
            prompt_2 = ChatPromptTemplate.from_messages(
                [
                    ("system", "You are an expert on vegetables."),
                    ("human", "{query}"),
                ]
            )

            chain_1 = prompt_1 | llm | StrOutputParser()
            chain_2 = prompt_2 | llm | StrOutputParser()

            route_system = "Route the user's query to either the animal or vegetable expert."
            route_prompt = ChatPromptTemplate.from_messages(
                [
                    ("system", route_system),
                    ("human", "{query}"),
                ]
            )


            class RouteQuery(TypedDict):
                """Route query to destination."""
                destination: Literal["animal", "vegetable"]


            route_chain = (
                route_prompt
                | llm.with_structured_output(RouteQuery)
                | itemgetter("destination")
            )

            chain = {
                "destination": route_chain,  # "animal" or "vegetable"
                "query": lambda x: x["query"],  # pass through input query
            } | RunnableLambda(
                # if animal, chain_1. otherwise, chain_2.
                lambda x: chain_1 if x["destination"] == "animal" else chain_2,
            )

            chain.invoke({"query": "what color are carrots"})
    """

    llm_chain: LLMChain
    """LLM chain used to perform routing."""

    @model_validator(mode="after")
    def validate_prompt(self) -> Self:
        prompt = self.llm_chain.prompt
        if prompt.output_parser is None:
            raise ValueError(
                "LLMRouterChain requires base llm_chain prompt to have an output"
                " parser that converts LLM text output to a dictionary with keys"
                " 'destination' and 'next_inputs'. Received a prompt with no output"
                " parser."
            )
        return self

    @property
    def input_keys(self) -> list[str]:
        """Will be whatever keys the LLM chain prompt expects.

        :meta private:
        """
        return self.llm_chain.input_keys

    def _validate_outputs(self, outputs: dict[str, Any]) -> None:
        super()._validate_outputs(outputs)
        if not isinstance(outputs["next_inputs"], dict):
            raise ValueError

    def _call(
        self,
        inputs: dict[str, Any],
        run_manager: Optional[CallbackManagerForChainRun] = None,
    ) -> dict[str, Any]:
        _run_manager = run_manager or CallbackManagerForChainRun.get_noop_manager()
        callbacks = _run_manager.get_child()

        prediction = self.llm_chain.predict(callbacks=callbacks, **inputs)
        output = cast(
            dict[str, Any],
            self.llm_chain.prompt.output_parser.parse(prediction),
        )
        return output

    async def _acall(
        self,
        inputs: dict[str, Any],
        run_manager: Optional[AsyncCallbackManagerForChainRun] = None,
    ) -> dict[str, Any]:
        _run_manager = (
            run_manager or AsyncCallbackManagerForChainRun.get_noop_manager()
        )
        callbacks = _run_manager.get_child()
        output = cast(
            dict[str, Any],
            await self.llm_chain.apredict_and_parse(callbacks=callbacks, **inputs),
        )
        return output

    @classmethod
    def from_llm(
        cls, llm: BaseLanguageModel, prompt: BasePromptTemplate, **kwargs: Any
    ) -> LLMRouterChain:
        """Convenience constructor."""
        llm_chain = LLMChain(llm=llm, prompt=prompt)
        return cls(llm_chain=llm_chain, **kwargs)


class RouterOutputParser(BaseOutputParser[dict[str, str]]):
    """Parser for output of router chain in the multi-prompt chain."""

    default_destination: str = "DEFAULT"
    next_inputs_type: type = str
    next_inputs_inner_key: str = "input"

    def parse(self, text: str) -> dict[str, Any]:
        try:
            expected_keys = ["destination", "next_inputs"]
            parsed = parse_and_check_json_markdown(text, expected_keys)
            if not isinstance(parsed["destination"], str):
                raise ValueError("Expected 'destination' to be a string.")
            if not isinstance(parsed["next_inputs"], self.next_inputs_type):
                raise ValueError(
                    f"Expected 'next_inputs' to be {self.next_inputs_type}."
                )
            parsed["next_inputs"] = {
                self.next_inputs_inner_key: parsed["next_inputs"]
            }
            if (
                parsed["destination"].strip().lower()
                == self.default_destination.lower()
            ):
                parsed["destination"] = None
            else:
                parsed["destination"] = parsed["destination"].strip()
            return parsed
        except Exception as e:
            raise OutputParserException(
                f"Parsing text\n{text}\n raised following error:\n{e}"
            )
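

# Minimal sketch of the round trip through ``RouterOutputParser.parse``, following
# the implementation above; the literal LLM response text is an assumption for
# illustration only.
#
#     parser = RouterOutputParser()
#     text = (
#         '```json\n'
#         '{"destination": "animals", "next_inputs": "What color are carrots?"}\n'
#         '```'
#     )
#     parser.parse(text)
#     # -> {'destination': 'animals',
#     #     'next_inputs': {'input': 'What color are carrots?'}}
#     # A 'destination' of "DEFAULT" (any casing) is normalized to None, which
#     # downstream multi-route chains can treat as "fall back to the default chain".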