
import logging
from typing import Any, List, Mapping, Optional

import requests
from langchain_core.callbacks import CallbackManagerForLLMRun
from langchain_core.language_models.llms import LLM

from langchain_community.llms.utils import enforce_stop_tokens

logger = logging.getLogger(__name__)


class ChatGLM(LLM):
    """ChatGLM LLM service.

    Example:
        .. code-block:: python

            from langchain_community.llms import ChatGLM
            endpoint_url = (
                "http://127.0.0.1:8000"
            )
            ChatGLM_llm = ChatGLM(
                endpoint_url=endpoint_url
            )
    zhttp://127.0.0.1:8000/endpoint_urlNmodel_kwargsi N  	max_tokeng?temperaturehistorygffffff?top_pFwith_historyreturnc                      y)Nchat_glm )selfs    _/var/www/html/test/engine/venv/lib/python3.12/site-packages/langchain_community/llms/chatglm.py	_llm_typezChatGLM._llm_type+   s        c                 J    | j                   xs i }i d| j                  id|iS )zGet the identifying parameters.r   r   )r   r   )r   _model_kwargss     r   _identifying_paramszChatGLM._identifying_params/   s>     ))/R
t001
}-
 	
r   promptstoprun_managerkwargsc                 t   | j                   xs i }ddi}|| j                  | j                  | j                  | j                  d}|j                  |       |j                  |       t        j                  d|        	 t        j                  | j                  ||      }t        j                  d|        |j                  d	k7  rt        d
|       	 |j                         }
t!        |
t"              rd}||
v r|
|   }nt        d|
       t        d|
       	 |t)        ||      }| j*                  r
|
d   | _        |S # t        j                  j                  $ r}	t        d|	       d}	~	ww xY w# t        j                  j$                  $ r }	t        d|	 d|j&                         d}	~	ww xY w)aw  Call out to a ChatGLM LLM inference endpoint.

        Args:
            prompt: The prompt to pass into the model.
            stop: Optional list of stop words to use when generating.

        Returns:
            The string generated by the model.

        Example:
            .. code-block:: python

                response = chatglm_llm.invoke("Who are you?")
        zContent-Typezapplication/json)r   r   r   
max_lengthr   zChatGLM payload: )headersjsonz$Error raised by inference endpoint: NzChatGLM response:    zFailed with response: responsezNo content in response : zUnexpected response type: z?Error raised during decoding response from inference endpoint: z.
Response: r   )r   r   r   r   r   updateloggerdebugrequestspostr   
exceptionsRequestException
ValueErrorstatus_coder%   
isinstancedictJSONDecodeErrortextr	   r   )r   r   r   r    r!   r   r$   payloadr'   eparsed_responsecontent_keysr4   s                r   _callzChatGLM._call8   s   , ))/R "#56 ++||..ZZ
 	}%v(	23	I}}T%6%6gVH 	)(453&5hZ@AA	&mmoO /40)?2*<8D$'@@Q%RSS #=o=N!OPP	  &tT2D*95DL? ""33 	ICA3GHH	I* ""22 	QRSQT U'}}o/ 	s1    "E AE: E7$E22E7:F7F22F7)NN)__name__
__module____qualname____doc__r   str__annotations__r   r   r2   r   intr   floatr   r   r   r   boolpropertyr   r   r   r   r   r9   r   r   r   r   r      s     1L#0#'L(4.'1Is1K-GT$Z%E50L$'3   
WS#X%6 
 
 %):>	II tCy!I 67	I
 I 
Ir   r   )loggingtypingr   r   r   r   r+   langchain_core.callbacksr   #langchain_core.language_models.llmsr   langchain_community.llms.utilsr	   	getLoggerr:   r)   r   r   r   r   <module>rJ      s:     / /  = 3 >			8	$tc tr   
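

# Usage sketch, adapted from the docstrings above: a minimal example that
# assumes a ChatGLM api-style HTTP server is already running at
# http://127.0.0.1:8000 and returns JSON of the form
# {"response": ..., "history": ...}; an illustration, not part of the class.
if __name__ == "__main__":
    chatglm_llm = ChatGLM(endpoint_url="http://127.0.0.1:8000")
    print(chatglm_llm.invoke("Who are you?"))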