
"""Experiment with different models."""

from __future__ import annotations

from collections.abc import Sequence
from typing import Optional

from langchain_core.language_models.llms import BaseLLM
from langchain_core.prompts.prompt import PromptTemplate
from langchain_core.utils.input import get_color_mapping, print_text

from langchain.chains.base import Chain
from langchain.chains.llm import LLMChain

class ModelLaboratory:
    """A utility to experiment with and compare the performance of different models."""

    def __init__(self, chains: Sequence[Chain], names: Optional[list[str]] = None):
        """Initialize the ModelLaboratory with chains to experiment with.

        Args:
            chains (Sequence[Chain]): A sequence of chains to experiment with.
                Each chain must have exactly one input and one output variable.
            names (Optional[List[str]]): Optional list of names corresponding to each chain.
                If provided, its length must match the number of chains.

        Raises:
            ValueError: If any chain is not an instance of `Chain`.
            ValueError: If a chain does not have exactly one input variable.
            ValueError: If a chain does not have exactly one output variable.
            ValueError: If the length of `names` does not match the number of chains.
        """
        for chain in chains:
            if not isinstance(chain, Chain):
                msg = (
                    "ModelLaboratory should now be initialized with Chains. "
                    "If you want to initialize with LLMs, use the `from_llms` method "
                    "instead (`ModelLaboratory.from_llms(...)`)"
                )
                raise ValueError(msg)
            if len(chain.input_keys) != 1:
                msg = f"Currently only support chains with one input variable, got {chain.input_keys}"
                raise ValueError(msg)
            if len(chain.output_keys) != 1:
                msg = f"Currently only support chains with one output variable, got {chain.output_keys}"
                raise ValueError(msg)
        if names is not None and len(names) != len(chains):
            msg = "Length of chains does not match length of names."
            raise ValueError(msg)
        self.chains = chains
        chain_range = [str(i) for i in range(len(self.chains))]
        self.chain_colors = get_color_mapping(chain_range)
        self.names = names

    @classmethod
    def from_llms(
        cls, llms: list[BaseLLM], prompt: Optional[PromptTemplate] = None
    ) -> ModelLaboratory:
        """Initialize the ModelLaboratory with LLMs and an optional prompt.

        Args:
            llms (List[BaseLLM]): A list of LLMs to experiment with.
            prompt (Optional[PromptTemplate]): An optional prompt to use with the LLMs.
                If provided, the prompt must contain exactly one input variable.

        Returns:
            ModelLaboratory: An instance of `ModelLaboratory` initialized with LLMs.
        """
        if prompt is None:
            prompt = PromptTemplate(input_variables=["_input"], template="{_input}")
        chains = [LLMChain(llm=llm, prompt=prompt) for llm in llms]
        names = [str(llm) for llm in llms]
        return cls(chains, names=names)

    def compare(self, text: str) -> None:
        """Compare model outputs on an input text.

        If a prompt was provided when starting the laboratory, then this text will be
        fed into the prompt. If no prompt was provided, then the input text is the
        entire prompt.

        Args:
            text: input text to run all models on.
        """
        print(f"\033[1mInput:\033[0m\n{text}\n")
        for i, chain in enumerate(self.chains):
            name = self.names[i] if self.names is not None else str(chain)
            print_text(name, end="\n")
            output = chain.run(text)
            print_text(output, color=self.chain_colors[str(i)], end="\n\n")
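
# ---------------------------------------------------------------------------
# Illustrative usage sketch: it assumes the optional `langchain_community`
# package is installed and uses its `FakeListLLM` so the comparison runs
# without real API calls; any real `BaseLLM` instances could be passed to
# `from_llms` in the same way.
# ---------------------------------------------------------------------------
if __name__ == "__main__":
    from langchain_community.llms.fake import FakeListLLM

    # Two canned "models" that each return a fixed answer, used only to show
    # how ModelLaboratory fans a single input out to several chains.
    llm_a = FakeListLLM(responses=["Paris is the capital of France."])
    llm_b = FakeListLLM(responses=["The capital of France is Paris."])

    lab = ModelLaboratory.from_llms([llm_a, llm_b])
    lab.compare("What is the capital of France?")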