
import torch
from torch import Tensor
from .optimizer import (Optimizer, _use_grad_for_differentiable, _default_to_fused_or_foreach,
                        _differentiable_doc, _foreach_doc, _maximize_doc, _fused_doc)
from typing import List, Optional

__all__ = ['SGD', 'sgd']


class SGD(Optimizer):
    def __init__(self, params, lr=1e-3, momentum=0, dampening=0,
                 weight_decay=0, nesterov=False, *, maximize: bool = False,
                 foreach: Optional[bool] = None, differentiable: bool = False,
                 fused: Optional[bool] = None):
        if lr < 0.0:
            raise ValueError(f"Invalid learning rate: {lr}")
        if momentum < 0.0:
            raise ValueError(f"Invalid momentum value: {momentum}")
        if weight_decay < 0.0:
            raise ValueError(f"Invalid weight_decay value: {weight_decay}")

        defaults = dict(lr=lr, momentum=momentum, dampening=dampening,
                        weight_decay=weight_decay, nesterov=nesterov,
                        maximize=maximize, foreach=foreach,
                        differentiable=differentiable, fused=fused)
        if nesterov and (momentum <= 0 or dampening != 0):
            raise ValueError("Nesterov momentum requires a momentum and zero dampening")
        super().__init__(params, defaults)

        if fused:
            self._step_supports_amp_scaling = True
            if differentiable:
                raise RuntimeError("`fused` does not support `differentiable`")
            if foreach:
                raise RuntimeError("`fused` and `foreach` cannot be `True` together.")

    def __setstate__(self, state):
        super().__setstate__(state)
        # Older checkpoints may lack the newer hyperparameter keys.
        for group in self.param_groups:
            group.setdefault('nesterov', False)
            group.setdefault('maximize', False)
            group.setdefault('foreach', None)
            group.setdefault('differentiable', False)
            group.setdefault('fused', False)

    def _init_group(self, group, params_with_grad, d_p_list, momentum_buffer_list):
        has_sparse_grad = False

        for p in group['params']:
            if p.grad is not None:
                params_with_grad.append(p)
                d_p_list.append(p.grad)
                if p.grad.is_sparse:
                    has_sparse_grad = True

                state = self.state[p]
                momentum_buffer_list.append(state.get('momentum_buffer'))

        return has_sparse_grad

    @_use_grad_for_differentiable
    def step(self, closure=None):
        """Performs a single optimization step.

        Args:
            closure (Callable, optional): A closure that reevaluates the model
                and returns the loss.
        """
        loss = None
        if closure is not None:
            with torch.enable_grad():
                loss = closure()

        for group in self.param_groups:
            params_with_grad = []
            d_p_list = []
            momentum_buffer_list = []

            has_sparse_grad = self._init_group(group, params_with_grad, d_p_list, momentum_buffer_list)

            sgd(params_with_grad,
                d_p_list,
                momentum_buffer_list,
                weight_decay=group['weight_decay'],
                momentum=group['momentum'],
                lr=group['lr'],
                dampening=group['dampening'],
                nesterov=group['nesterov'],
                maximize=group['maximize'],
                has_sparse_grad=has_sparse_grad,
                foreach=group['foreach'],
                fused=group['fused'],
                grad_scale=getattr(self, "grad_scale", None),
                found_inf=getattr(self, "found_inf", None))

            # update momentum_buffers in state
            for p, momentum_buffer in zip(params_with_grad, momentum_buffer_list):
                state = self.state[p]
                state['momentum_buffer'] = momentum_buffer

        return loss


SGD.__doc__ = r"""Implements stochastic gradient descent (optionally with momentum).

    .. math::
       \begin{aligned}
            &\rule{110mm}{0.4pt}                                                                 \\
            &\textbf{input}      : \gamma \text{ (lr)}, \: \theta_0 \text{ (params)}, \: f(\theta)
                \text{ (objective)}, \: \lambda \text{ (weight decay)},                          \\
            &\hspace{13mm} \:\mu \text{ (momentum)}, \:\tau \text{ (dampening)},
            \:\textit{ nesterov,}\:\textit{ maximize}                                     \\[-1.ex]
            &\rule{110mm}{0.4pt}                                                                 \\
            &\textbf{for} \: t=1 \: \textbf{to} \: \ldots \: \textbf{do}                         \\
            &\hspace{5mm}g_t           \leftarrow   \nabla_{\theta} f_t (\theta_{t-1})           \\
            &\hspace{5mm}\textbf{if} \: \lambda \neq 0                                           \\
            &\hspace{10mm} g_t \leftarrow g_t + \lambda  \theta_{t-1}                            \\
            &\hspace{5mm}\textbf{if} \: \mu \neq 0                                               \\
            &\hspace{10mm}\textbf{if} \: t > 1                                                   \\
            &\hspace{15mm} \textbf{b}_t \leftarrow \mu \textbf{b}_{t-1} + (1-\tau) g_t           \\
            &\hspace{10mm}\textbf{else}                                                          \\
            &\hspace{15mm} \textbf{b}_t \leftarrow g_t                                           \\
            &\hspace{10mm}\textbf{if} \: \textit{nesterov}                                       \\
            &\hspace{15mm} g_t \leftarrow g_{t} + \mu \textbf{b}_t                             \\
            &\hspace{10mm}\textbf{else}                                                   \\[-1.ex]
            &\hspace{15mm} g_t  \leftarrow  \textbf{b}_t                                         \\
            &\hspace{5mm}\textbf{if} \: \textit{maximize}                                          \\
            &\hspace{10mm}\theta_t \leftarrow \theta_{t-1} + \gamma g_t                   \\[-1.ex]
            &\hspace{5mm}\textbf{else}                                                    \\[-1.ex]
            &\hspace{10mm}\theta_t \leftarrow \theta_{t-1} - \gamma g_t                   \\[-1.ex]
            &\rule{110mm}{0.4pt}                                                          \\[-1.ex]
            &\bf{return} \:  \theta_t                                                     \\[-1.ex]
            &\rule{110mm}{0.4pt}                                                          \\[-1.ex]
       \end{aligned}
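
    For example, two steps of plain momentum (:math:`\mu = 0.9`, :math:`\tau = 0`,
    :math:`\lambda = 0`, :math:`\gamma = 0.1`) with gradients :math:`g_1 = g_2 = 1`
    unroll to

    .. math::
       b_1 = 1, \quad \theta_1 = \theta_0 - 0.1, \qquad
       b_2 = 0.9 \cdot 1 + 1 = 1.9, \quad \theta_2 = \theta_1 - 0.19.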

    Nesterov momentum is based on the formula from
    `On the importance of initialization and momentum in deep learning`__.
    """ + fr"""
    Args:
        params (iterable): iterable of parameters to optimize or dicts defining
            parameter groups
        lr (float, optional): learning rate (default: 1e-3)
        momentum (float, optional): momentum factor (default: 0)
        weight_decay (float, optional): weight decay (L2 penalty) (default: 0)
        dampening (float, optional): dampening for momentum (default: 0)
        nesterov (bool, optional): enables Nesterov momentum (default: False)
        {_maximize_doc}
        {_foreach_doc}
        {_differentiable_doc}
        {_fused_doc}
    """ + r"""

    Example:
        >>> # xdoctest: +SKIP
        >>> optimizer = torch.optim.SGD(model.parameters(), lr=0.1, momentum=0.9)
        >>> optimizer.zero_grad()
        >>> loss_fn(model(input), target).backward()
        >>> optimizer.step()

    __ http://www.cs.toronto.edu/%7Ehinton/absps/momentum.pdf

    .. note::
        The implementation of SGD with Momentum/Nesterov subtly differs from
        Sutskever et al. and implementations in some other frameworks.

        Considering the specific case of Momentum, the update can be written as

        .. math::
            \begin{aligned}
                v_{t+1} & = \mu * v_{t} + g_{t+1}, \\
                p_{t+1} & = p_{t} - \text{lr} * v_{t+1},
            \end{aligned}

        where :math:`p`, :math:`g`, :math:`v` and :math:`\mu` denote the
        parameters, gradient, velocity, and momentum respectively.

        This is in contrast to Sutskever et al. and
        other frameworks which employ an update of the form

        .. math::
            \begin{aligned}
                v_{t+1} & = \mu * v_{t} + \text{lr} * g_{t+1}, \\
                p_{t+1} & = p_{t} - v_{t+1}.
            \end{aligned}

        The Nesterov version is analogously modified.

        Moreover, the initial value of the momentum buffer is set to the
        gradient value at the first step. This is in contrast to some other
        frameworks that initialize it to all zeros.
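
        For a constant learning rate the two formulations are equivalent:
        substituting :math:`w_t = \text{lr} * v_t` into the first update rule
        recovers the second. They diverge as soon as the learning rate changes
        during training, because the formulation used here rescales the whole
        accumulated velocity by the *current* :math:`\text{lr}` at each step.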
    """


def sgd(params: List[Tensor],
        d_p_list: List[Tensor],
        momentum_buffer_list: List[Optional[Tensor]],
        # TorchScript does not support keyword-only arguments with defaults,
        # so these stay positional-or-keyword parameters for now.
        has_sparse_grad: bool = None,
        foreach: Optional[bool] = None,
        fused: Optional[bool] = None,
        grad_scale: Optional[Tensor] = None,
        found_inf: Optional[Tensor] = None,
        *,
        weight_decay: float,
        momentum: float,
        lr: float,
        dampening: float,
        nesterov: bool,
        maximize: bool):
    r"""Functional API that performs SGD algorithm computation.

    See :class:`~torch.optim.SGD` for details.
    """

    if foreach is None and fused is None:
        # The if statement is explicit because TorchScript cannot handle
        # Optionals nor fancy conditionals when scripting.
        if not torch.jit.is_scripting():
            fused, foreach = _default_to_fused_or_foreach(params, differentiable=False, use_fused=False)
        else:
            foreach = False
            fused = False
    if foreach is None:
        foreach = False
    if fused is None:
        fused = False

    if foreach and torch.jit.is_scripting():
        raise RuntimeError('torch.jit.script not supported with foreach optimizers')
    if fused and torch.jit.is_scripting():
        raise RuntimeError('torch.jit.script not supported with fused optimizers')

    if foreach and not torch.jit.is_scripting():
        func = _multi_tensor_sgd
    elif fused and not torch.jit.is_scripting():
        func = _fused_sgd
    else:
        func = _single_tensor_sgd

    func(params,
         d_p_list,
         momentum_buffer_list,
         weight_decay=weight_decay,
         momentum=momentum,
         lr=lr,
         dampening=dampening,
         nesterov=nesterov,
         has_sparse_grad=has_sparse_grad,
         maximize=maximize,
         grad_scale=grad_scale,
         found_inf=found_inf)


def _single_tensor_sgd(params: List[Tensor],
                       d_p_list: List[Tensor],
                       momentum_buffer_list: List[Optional[Tensor]],
                       grad_scale: Optional[Tensor],
                       found_inf: Optional[Tensor],
                       *,
                       weight_decay: float,
                       momentum: float,
                       lr: float,
                       dampening: float,
                       nesterov: bool,
                       maximize: bool,
                       has_sparse_grad: bool):
    assert grad_scale is None and found_inf is None

    for i, param in enumerate(params):
        d_p = d_p_list[i] if not maximize else -d_p_list[i]

        if weight_decay != 0:
            d_p = d_p.add(param, alpha=weight_decay)

        if momentum != 0:
            buf = momentum_buffer_list[i]

            if buf is None:
                # The buffer is initialized to the gradient on the first step.
                buf = torch.clone(d_p).detach()
                momentum_buffer_list[i] = buf
            else:
                buf.mul_(momentum).add_(d_p, alpha=1 - dampening)

            if nesterov:
                d_p = d_p.add(buf, alpha=momentum)
            else:
                d_p = buf

        param.add_(d_p, alpha=-lr)


def _multi_tensor_sgd(params: List[Tensor],
                      grads: List[Tensor],
                      momentum_buffer_list: List[Optional[Tensor]],
                      grad_scale: Optional[Tensor],
                      found_inf: Optional[Tensor],
                      *,
                      weight_decay: float,
                      momentum: float,
                      lr: float,
                      dampening: float,
                      nesterov: bool,
                      maximize: bool,
                      has_sparse_grad: bool):
    assert grad_scale is None and found_inf is None

    if len(params) == 0:
        return

    grouped_tensors = Optimizer._group_tensors_by_device_and_dtype(
        [params, grads, momentum_buffer_list], with_indices=True)

    for ((device_params, device_grads, device_momentum_buffer_list), indices) in grouped_tensors.values():
        device_has_sparse_grad = has_sparse_grad and any(grad.is_sparse for grad in device_grads)

        if maximize:
            device_grads = torch._foreach_neg(device_grads)

        if weight_decay != 0:
            # Re-use the intermediate memory (device_grads) already allocated for maximize
            if maximize:
                torch._foreach_add_(device_grads, device_params, alpha=weight_decay)
            else:
                device_grads = torch._foreach_add(device_grads, device_params, alpha=weight_decay)

        if momentum != 0:
            bufs = []

            all_states_with_momentum_buffer = True
            for i in range(len(device_momentum_buffer_list)):
                if device_momentum_buffer_list[i] is None:
                    all_states_with_momentum_buffer = False
                    break
                else:
                    bufs.append(device_momentum_buffer_list[i])

            if all_states_with_momentum_buffer:
                torch._foreach_mul_(bufs, momentum)
                torch._foreach_add_(bufs, device_grads, alpha=1 - dampening)
            else:
                bufs = []
                for i in range(len(device_momentum_buffer_list)):
                    if device_momentum_buffer_list[i] is None:
                        buf = device_momentum_buffer_list[i] = momentum_buffer_list[indices[i]] = \
                            torch.clone(device_grads[i]).detach()
                    else:
                        buf = device_momentum_buffer_list[i]
                        buf.mul_(momentum).add_(device_grads[i], alpha=1 - dampening)

                    bufs.append(buf)

            if nesterov:
                torch._foreach_add_(device_grads, bufs, alpha=momentum)
            else:
                device_grads = bufs

        if not device_has_sparse_grad:
            torch._foreach_add_(device_params, device_grads, alpha=-lr)
        else:
            # foreach APIs don't support sparse
            for i in range(len(device_params)):
                device_params[i].add_(device_grads[i], alpha=-lr)


def _fused_sgd(params: List[Tensor],
               grads: List[Tensor],
               momentum_buffer_list: List[Optional[Tensor]],
               grad_scale: Optional[Tensor],
               found_inf: Optional[Tensor],
               *,
               weight_decay: float,
               momentum: float,
               lr: float,
               dampening: float,
               nesterov: bool,
               maximize: bool,
               has_sparse_grad: bool) -> None:
    if not params:
        return
    if has_sparse_grad:
        raise RuntimeError("`_fused_sgd` does not support sparse gradients")
    grad_scale_dict = {grad_scale.device: grad_scale} if grad_scale is not None else None
    found_inf_dict = {found_inf.device: found_inf} if found_inf is not None else None

    no_momentum_buffer = momentum == 0
    is_first_step = all(t is None for t in momentum_buffer_list) and not no_momentum_buffer
    if is_first_step:
        for i, g in enumerate(grads):
            momentum_buffer_list[i] = torch.empty_like(g)
    grouped_tensors = Optimizer._group_tensors_by_device_and_dtype(
        [params, grads, momentum_buffer_list], with_indices=False)
    for (device, _), ((device_params, device_grads, device_momentum_buffer_list), _) in grouped_tensors.items():
        device_grad_scale, device_found_inf = None, None
        if grad_scale is not None:
            if device not in grad_scale_dict:
                grad_scale_dict[device] = grad_scale.to(device)
            device_grad_scale = grad_scale_dict[device]
        if found_inf is not None:
            if device not in found_inf_dict:
                found_inf_dict[device] = found_inf.to(device)
            device_found_inf = found_inf_dict[device]
        torch._fused_sgd_(
            device_params,
            device_grads,
            [] if no_momentum_buffer else device_momentum_buffer_list,
            weight_decay=weight_decay,
            momentum=momentum,
            lr=lr,
            dampening=dampening,
            nesterov=nesterov,
            maximize=maximize,
            is_first_step=is_first_step,
            grad_scale=device_grad_scale,
            found_inf=device_found_inf,
        )
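

# ---------------------------------------------------------------------------
# Editorial usage sketch, not part of the upstream module: it runs two
# momentum steps on a toy parameter and checks the result against the
# hand-unrolled recurrence from the class docstring (b_1 = g_1, then
# b_t = mu * b_{t-1} + g_t and theta_t = theta_{t-1} - lr * b_t). Only names
# defined or imported above are used.
if __name__ == "__main__":
    theta = torch.zeros(3, requires_grad=True)
    opt = SGD([theta], lr=0.1, momentum=0.9)

    for _ in range(2):
        opt.zero_grad()
        theta.sum().backward()  # constant gradient of 1 for every element
        opt.step()

    # Step 1: b1 = 1,   theta1 = 0.0 - 0.1 * 1.0 = -0.10
    # Step 2: b2 = 1.9, theta2 = -0.1 - 0.1 * 1.9 = -0.29
    assert torch.allclose(theta.detach(), torch.full((3,), -0.29))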