
from __future__ import annotations

import importlib.util
import logging
from typing import Any, Iterator, List, Mapping, Optional

from langchain_core._api.deprecation import deprecated
from langchain_core.callbacks import CallbackManagerForLLMRun
from langchain_core.language_models.llms import BaseLLM
from langchain_core.outputs import Generation, GenerationChunk, LLMResult
from pydantic import ConfigDict

DEFAULT_MODEL_ID = "gpt2"
DEFAULT_TASK = "text-generation"
VALID_TASKS = (
    "text2text-generation",
    "text-generation",
    "summarization",
    "translation",
)
DEFAULT_BATCH_SIZE = 4

logger = logging.getLogger(__name__)


@deprecated(
    since="0.0.37",
    removal="1.0",
    alternative_import="langchain_huggingface.HuggingFacePipeline",
)
class HuggingFacePipeline(BaseLLM):
    """HuggingFace Pipeline API.

    To use, you should have the ``transformers`` python package installed.

    Only supports `text-generation`, `text2text-generation`, `summarization` and
    `translation` for now.

    Example using from_model_id:
        .. code-block:: python

            from langchain_community.llms import HuggingFacePipeline
            hf = HuggingFacePipeline.from_model_id(
                model_id="gpt2",
                task="text-generation",
                pipeline_kwargs={"max_new_tokens": 10},
            )
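
    Example streaming tokens with the ``hf`` object created above (a hedged
    sketch; it assumes a ``text-generation`` pipeline and uses the ``stream``
    method inherited from ``BaseLLM``, which drives ``_stream`` below):
        .. code-block:: python

            for chunk in hf.stream("What is machine learning?"):
                print(chunk, end="", flush=True)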

    Example passing a pipeline in directly:
        .. code-block:: python

            from langchain_community.llms import HuggingFacePipeline
            from transformers import AutoModelForCausalLM, AutoTokenizer, pipeline

            model_id = "gpt2"
            tokenizer = AutoTokenizer.from_pretrained(model_id)
            model = AutoModelForCausalLM.from_pretrained(model_id)
            pipe = pipeline(
                "text-generation", model=model, tokenizer=tokenizer, max_new_tokens=10
            )
            hf = HuggingFacePipeline(pipeline=pipe)
    """

    pipeline: Any = None  #: :meta private:
    model_id: str = DEFAULT_MODEL_ID
    """Model name to use."""
    model_kwargs: Optional[dict] = None
    """Keyword arguments passed to the model."""
    pipeline_kwargs: Optional[dict] = None
    """Keyword arguments passed to the pipeline."""
    batch_size: int = DEFAULT_BATCH_SIZE
    """Batch size to use when passing multiple documents to generate."""

    model_config = ConfigDict(
        extra="forbid",
    )

    @classmethod
    def from_model_id(
        cls,
        model_id: str,
        task: str,
        backend: str = "default",
        device: Optional[int] = -1,
        device_map: Optional[str] = None,
        model_kwargs: Optional[dict] = None,
        pipeline_kwargs: Optional[dict] = None,
        batch_size: int = DEFAULT_BATCH_SIZE,
        **kwargs: Any,
    ) -> HuggingFacePipeline:
        """Construct the pipeline object from model_id and task."""
        try:
            from transformers import (
                AutoModelForCausalLM,
                AutoModelForSeq2SeqLM,
                AutoTokenizer,
            )
            from transformers import pipeline as hf_pipeline
        except ImportError:
            raise ValueError(
                "Could not import transformers python package. "
                "Please install it with `pip install transformers`."
            )

        _model_kwargs = model_kwargs or {}
        tokenizer = AutoTokenizer.from_pretrained(model_id, **_model_kwargs)

        try:
            if task == "text-generation":
                if backend == "openvino":
                    try:
                        from optimum.intel.openvino import OVModelForCausalLM
                    except ImportError:
                        raise ValueError(
                            "Could not import optimum-intel python package. "
                            "Please install it with: "
                            "pip install 'optimum[openvino,nncf]'"
                        )
                    try:
                        # Try to load an already exported OpenVINO model first.
                        model = OVModelForCausalLM.from_pretrained(
                            model_id, **_model_kwargs
                        )
                    except Exception:
                        # Fall back to exporting the model on the fly.
                        model = OVModelForCausalLM.from_pretrained(
                            model_id, export=True, **_model_kwargs
                        )
                else:
                    model = AutoModelForCausalLM.from_pretrained(
                        model_id, **_model_kwargs
                    )
            elif task in ("text2text-generation", "summarization", "translation"):
                if backend == "openvino":
                    try:
                        from optimum.intel.openvino import OVModelForSeq2SeqLM
                    except ImportError:
                        raise ValueError(
                            "Could not import optimum-intel python package. "
                            "Please install it with: "
                            "pip install 'optimum[openvino,nncf]'"
                        )
                    try:
                        # Try to load an already exported OpenVINO model first.
                        model = OVModelForSeq2SeqLM.from_pretrained(
                            model_id, **_model_kwargs
                        )
                    except Exception:
                        # Fall back to exporting the model on the fly.
                        model = OVModelForSeq2SeqLM.from_pretrained(
                            model_id, export=True, **_model_kwargs
                        )
                else:
                    model = AutoModelForSeq2SeqLM.from_pretrained(
                        model_id, **_model_kwargs
                    )
            else:
                raise ValueError(
                    f"Got invalid task {task}, "
                    f"currently only {VALID_TASKS} are supported"
                )
        except ImportError as e:
            raise ValueError(
                f"Could not load the {task} model due to missing dependencies."
            ) from e

        # Make sure the tokenizer has a pad token so batched generation works.
        if tokenizer.pad_token is None:
            if model.config.pad_token_id is not None:
                tokenizer.pad_token_id = model.config.pad_token_id
            elif model.config.eos_token_id is not None and isinstance(
                model.config.eos_token_id, int
            ):
                tokenizer.pad_token_id = model.config.eos_token_id
            elif tokenizer.eos_token_id is not None:
                tokenizer.pad_token_id = tokenizer.eos_token_id
            else:
                tokenizer.add_special_tokens({"pad_token": "[PAD]"})

        if (
            (
                getattr(model, "is_loaded_in_4bit", False)
                or getattr(model, "is_loaded_in_8bit", False)
            )
            and device is not None
            and backend == "default"
        ):
            logger.warning(
                f"Setting the `device` argument to None from {device} to avoid "
                "the error caused by attempting to move the model that was already "
                "loaded on the GPU using the Accelerate module to the same or "
                "another device."
            )
            device = None

        if (
            device is not None
            and importlib.util.find_spec("torch") is not None
            and backend == "default"
        ):
            import torch

            cuda_device_count = torch.cuda.device_count()
            if device < -1 or (device >= cuda_device_count):
                raise ValueError(
                    f"Got device=={device}, "
                    f"device is required to be within [-1, {cuda_device_count})"
                )
            if device_map is not None and device < 0:
                device = None
            if device is not None and device < 0 and cuda_device_count > 0:
                logger.warning(
                    "Device has %d GPUs available. "
                    "Provide device={deviceId} to `from_model_id` to use available "
                    "GPUs for execution. deviceId is -1 (default) for CPU and "
                    "can be a positive integer associated with CUDA device id.",
                    cuda_device_count,
                )
        if device is not None and device_map is not None and backend == "openvino":
            logger.warning("Please set device for OpenVINO through: `model_kwargs`")
        if "trust_remote_code" in _model_kwargs:
            _model_kwargs = {
                k: v for k, v in _model_kwargs.items() if k != "trust_remote_code"
            }
        _pipeline_kwargs = pipeline_kwargs or {}
        pipeline = hf_pipeline(
            task=task,
            model=model,
            tokenizer=tokenizer,
            device=device,
            device_map=device_map,
            batch_size=batch_size,
            model_kwargs=_model_kwargs,
            **_pipeline_kwargs,
        )
        if pipeline.task not in VALID_TASKS:
            raise ValueError(
                f"Got invalid task {pipeline.task}, "
                f"currently only {VALID_TASKS} are supported"
            )
        return cls(
            pipeline=pipeline,
            model_id=model_id,
            model_kwargs=model_kwargs,
            pipeline_kwargs=pipeline_kwargs,
            batch_size=batch_size,
            **kwargs,
        )

    @property
    def _identifying_params(self) -> Mapping[str, Any]:
        """Get the identifying parameters."""
        return {
            "model_id": self.model_id,
            "model_kwargs": self.model_kwargs,
            "pipeline_kwargs": self.pipeline_kwargs,
        }

    @property
    def _llm_type(self) -> str:
        return "huggingface_pipeline"

    def _generate(
        self,
        prompts: List[str],
        stop: Optional[List[str]] = None,
        run_manager: Optional[CallbackManagerForLLMRun] = None,
        **kwargs: Any,
    ) -> LLMResult:
        text_generations: List[str] = []
        default_pipeline_kwargs = self.pipeline_kwargs if self.pipeline_kwargs else {}
        pipeline_kwargs = kwargs.get("pipeline_kwargs", default_pipeline_kwargs)
        skip_prompt = kwargs.get("skip_prompt", False)

        for i in range(0, len(prompts), self.batch_size):
            batch_prompts = prompts[i : i + self.batch_size]

            # Process a batch of prompts in one pipeline call.
            responses = self.pipeline(
                batch_prompts,
                **pipeline_kwargs,
            )

            # Process each response in the batch.
            for j, response in enumerate(responses):
                if isinstance(response, list):
                    # If the model returns multiple generations, pick the top one.
                    response = response[0]

                if self.pipeline.task == "text-generation":
                    text = response["generated_text"]
                elif self.pipeline.task == "text2text-generation":
                    text = response["generated_text"]
                elif self.pipeline.task == "summarization":
                    text = response["summary_text"]
                elif self.pipeline.task in "translation":
                    text = response["translation_text"]
                else:
                    raise ValueError(
                        f"Got invalid task {self.pipeline.task}, "
                        f"currently only {VALID_TASKS} are supported"
                    )
                if skip_prompt:
                    text = text[len(batch_prompts[j]) :]
                text_generations.append(text)

        return LLMResult(
            generations=[[Generation(text=text)] for text in text_generations]
        )

    def _stream(
        self,
        prompt: str,
        stop: Optional[List[str]] = None,
        run_manager: Optional[CallbackManagerForLLMRun] = None,
        **kwargs: Any,
    ) -> Iterator[GenerationChunk]:
        from threading import Thread

        import torch
        from transformers import (
            StoppingCriteria,
            StoppingCriteriaList,
            TextIteratorStreamer,
        )

        pipeline_kwargs = kwargs.get("pipeline_kwargs", {})
        skip_prompt = kwargs.get("skip_prompt", True)

        if stop is not None:
            stop = self.pipeline.tokenizer.convert_tokens_to_ids(stop)
        stopping_ids_list = stop or []

        class StopOnTokens(StoppingCriteria):
            def __call__(
                self,
                input_ids: torch.LongTensor,
                scores: torch.FloatTensor,
                **kwargs: Any,
            ) -> bool:
                for stop_id in stopping_ids_list:
                    if input_ids[0][-1] == stop_id:
                        return True
                return False

        stopping_criteria = StoppingCriteriaList([StopOnTokens()])

        inputs = self.pipeline.tokenizer(prompt, return_tensors="pt")
        streamer = TextIteratorStreamer(
            self.pipeline.tokenizer,
            timeout=60.0,
            skip_prompt=skip_prompt,
            skip_special_tokens=True,
        )
        generation_kwargs = dict(
            inputs,
            streamer=streamer,
            stopping_criteria=stopping_criteria,
            **pipeline_kwargs,
        )
        # Run generation on a worker thread; the streamer yields tokens as they
        # are produced.
        t1 = Thread(target=self.pipeline.model.generate, kwargs=generation_kwargs)
        t1.start()

        for char in streamer:
            chunk = GenerationChunk(text=char)
            if run_manager:
                run_manager.on_llm_new_token(chunk.text, chunk=chunk)

            yield chunk