
from typing import Any, Dict, List, Mapping, Optional

import requests
from langchain_core.callbacks import CallbackManagerForLLMRun
from langchain_core.language_models.llms import LLM
from langchain_core.utils import get_from_dict_or_env, pre_init
from pydantic import ConfigDict

from langchain_community.llms.utils import enforce_stop_tokens

INSTRUCTION_KEY = "### Instruction:"
RESPONSE_KEY = "### Response:"
INTRO_BLURB = (
    "Below is an instruction that describes a task. "
    "Write a response that appropriately completes the request."
)
PROMPT_FOR_GENERATION_FORMAT = """{intro}
{instruction_key}
{instruction}
{response_key}
""".format(
    intro=INTRO_BLURB,
    instruction_key=INSTRUCTION_KEY,
    instruction="{instruction}",
    response_key=RESPONSE_KEY,
)


class MosaicML(LLM):
    """MosaicML LLM service.

    To use, you should have the
    environment variable ``MOSAICML_API_TOKEN`` set with your API token, or pass
    it as a named parameter to the constructor.

    Example:
        .. code-block:: python

            from langchain_community.llms import MosaicML
            endpoint_url = (
                "https://models.hosted-on.mosaicml.hosting/mpt-7b-instruct/v1/predict"
            )
            mosaic_llm = MosaicML(
                endpoint_url=endpoint_url,
                mosaicml_api_token="my-api-key"
            )
    """

    endpoint_url: str = (
        "https://models.hosted-on.mosaicml.hosting/mpt-7b-instruct/v1/predict"
    )
    """Endpoint URL to use."""
    inject_instruction_format: bool = False
    """Whether to inject the instruction format into the prompt."""
    model_kwargs: Optional[dict] = None
    """Keyword arguments to pass to the model."""
    retry_sleep: float = 1.0
    """How long to try sleeping for if a rate limit is encountered"""

    mosaicml_api_token: Optional[str] = None

    model_config = ConfigDict(
        extra="forbid",
    )

    @pre_init
    def validate_environment(cls, values: Dict) -> Dict:
        """Validate that api key and python package exists in environment."""
        mosaicml_api_token = get_from_dict_or_env(
            values, "mosaicml_api_token", "MOSAICML_API_TOKEN"
        )
        values["mosaicml_api_token"] = mosaicml_api_token
        return values

    @property
    def _identifying_params(self) -> Mapping[str, Any]:
        """Get the identifying parameters."""
        _model_kwargs = self.model_kwargs or {}
        return {
            **{"endpoint_url": self.endpoint_url},
            **{"model_kwargs": _model_kwargs},
        }

    @property
    def _llm_type(self) -> str:
        """Return type of llm."""
        return "mosaic"

    def _transform_prompt(self, prompt: str) -> str:
        """Transform prompt."""
        if self.inject_instruction_format:
            prompt = PROMPT_FOR_GENERATION_FORMAT.format(
                instruction=prompt,
            )
        return prompt

    def _call(
        self,
        prompt: str,
        stop: Optional[List[str]] = None,
        run_manager: Optional[CallbackManagerForLLMRun] = None,
        is_retry: bool = False,
        **kwargs: Any,
    ) -> str:
        """Call out to a MosaicML LLM inference endpoint.

        Args:
            prompt: The prompt to pass into the model.
            stop: Optional list of stop words to use when generating.

        Returns:
            The string generated by the model.

        Example:
            .. code-block:: python

                response = mosaic_llm.invoke("Tell me a joke.")
        """
        _model_kwargs = self.model_kwargs or {}

        prompt = self._transform_prompt(prompt)

        payload = {"inputs": [prompt]}
        payload.update(_model_kwargs)
        payload.update(kwargs)

        # HTTP headers for authorization
        headers = {
            "Authorization": f"{self.mosaicml_api_token}",
            "Content-Type": "application/json",
        }

        # send request
        try:
            response = requests.post(self.endpoint_url, headers=headers, json=payload)
        except requests.exceptions.RequestException as e:
            raise ValueError(f"Error raised by inference endpoint: {e}")

        try:
            # On a rate limit (429), sleep and retry once before giving up.
            if response.status_code == 429:
                if not is_retry:
                    import time

                    time.sleep(self.retry_sleep)

                    return self._call(prompt, stop, run_manager, is_retry=True)

                raise ValueError(
                    f"Error raised by inference API: rate limit exceeded.\nResponse: "
                    f"{response.text}"
                )

            parsed_response = response.json()

            # The inference API has changed a couple of times, so be robust to
            # multiple response formats by probing several output keys.
            if isinstance(parsed_response, dict):
                output_keys = ["data", "output", "outputs"]
                for key in output_keys:
                    if key in parsed_response:
                        output_item = parsed_response[key]
                        break
                else:
                    raise ValueError(
                        f"No valid key ({', '.join(output_keys)}) in response:"
                        f" {parsed_response}"
                    )
                if isinstance(output_item, list):
                    text = output_item[0]
                else:
                    text = output_item
            else:
                raise ValueError(f"Unexpected response type: {parsed_response}")

            # Older versions of the API include the input in the output response
            if text.startswith(prompt):
                text = text[len(prompt) :]

        except requests.exceptions.JSONDecodeError as e:
            raise ValueError(
                f"Error raised by inference API: {e}.\nResponse: {response.text}"
            )

        if stop is not None:
            text = enforce_stop_tokens(text, stop)

        return text