
    |h                        d Z ddlZddlZddlmc mZ ddlmZ ddl	m
Z
 ddlmZ ddlmZ dZdZd	ZeeegZ ed
       G d d             Z eddg        G d de             Z eddg        G d de             Z eddg        G d de             Z eddg        G d de             Z eddg        G d  d!e             Z ed"d#g        G d$ d%e             Z ed&d'g        G d( d)e             Z ed*d+g        G d, d-e             Z ed.d/g        G d0 d1e             Z ed2d3g        G d4 d5e             Z ed6d7g        G d8 d9e             Z ed:d;g        G d< d=e             Z ed>d?g        G d@ dAe             Z  edBdCg        G dD dEe             Z! edFdGg        G dH dIe             Z"dJ Z#dK Z$dL Z%dOdMZ&dN Z'y)PzKeras initializers.    N)backend)utils)serialization_lib)keras_exportpartition_shapepartition_offsetlayoutzkeras.initializers.Initializerc                   4    e Zd ZdZddZd Zed        Zd Zy)Initializera  Initializer base class: all TF-Keras initializers inherit from this
    class.

    Initializers should implement a `__call__()` method with the following
    signature:

    ```python
    def __call__(self, shape, dtype=None, **kwargs):
        # returns a tensor of shape `shape` and dtype `dtype`
        # containing values drawn from a distribution of your choice.
        return tf.random.uniform(shape=shape, dtype=dtype)
    ```

    Optionally, you an also implement the method `get_config()` and the class
    method `from_config()` in order to support serialization -- just like with
    any TF-Keras object.

    Here's a simple example: a random normal initializer.

    ```python
    class ExampleRandomNormal(Initializer):
        def __init__(self, mean, stddev):
            self.mean = mean
            self.stddev = stddev

        def __call__(self, shape, dtype=None, **kwargs):
            return tf.random.normal(
                shape, mean=self.mean, stddev=self.stddev, dtype=dtype
            )

        def get_config(self):  # To support serialization
            return {"mean": self.mean, "stddev": self.stddev}
    ```

    Note that we don't have to implement `from_config()` in the example above
    since the constructor arguments of the class the keys in the config returned
    by `get_config` are the same. In this case, the default `from_config()`
    works fine.
    Nc                     t        d      )zReturns a tensor object initialized as specified by the initializer.

        Args:
          shape: Shape of the tensor.
          dtype: Optional dtype of the tensor.
          **kwargs: Additional keyword arguments.
        z>Initializer subclasses must implement the `__call__()` method.)NotImplementedError)selfshapedtypekwargss       e/var/www/html/test/engine/venv/lib/python3.12/site-packages/tf_keras/src/initializers/initializers.py__call__zInitializer.__call__M   s     "L
 	
    c                     i S )zReturns the initializer's configuration as a JSON-serializable dict.

        Returns:
            A JSON-serializable Python dict.
         r   s    r   
get_configzInitializer.get_configY   s	     	r   c                 6    |j                  dd        | di |S )a  Instantiates an initializer from a configuration dictionary.

        Example:

        ```python
        initializer = RandomUniform(-1, 1)
        config = initializer.get_config()
        initializer = RandomUniform.from_config(config)
        ```

        Args:
            config: A Python dictionary, the output of `get_config()`.

        Returns:
            An `Initializer` instance.
        r   Nr   )popclsconfigs     r   from_configzInitializer.from_configa   s    $ 	

7D!}V}r   c                     t        | dd      r<t        | dd       .t        j                  d| j                  j                   d       y y d| _        y )N_usedFseedzThe initializer z is unseeded and being called multiple times, which will return identical values each time (even if the initializer is unseeded). Please update your code to provide a seed to the initializer, or avoid using the same initializer instance more than once.T)getattrwarningswarn	__class____name__r    r   s    r   _warn_reusezInitializer._warn_reusev   sT    4%(tVT*2&t~~'>'>&? @/ / 3 DJr   N)	r&   
__module____qualname____doc__r   r   classmethodr   r'   r   r   r   r   r   #   s+    &P

  (r   r   zkeras.initializers.Zeroszkeras.initializers.zeros)v1c                       e Zd ZdZddZy)Zerosa  Initializer that generates tensors initialized to 0.

    Also available via the shortcut function `tf.keras.initializers.zeros`.

    Examples:

    >>> # Standalone usage:
    >>> initializer = tf.keras.initializers.Zeros()
    >>> values = initializer(shape=(2, 2))

    >>> # Usage in a TF-Keras layer:
    >>> initializer = tf.keras.initializers.Zeros()
    >>> layer = tf.keras.layers.Dense(3, kernel_initializer=initializer)
    Nc                 x   t        | j                  j                  |       t        |      }|j                  r|t
        j                  k(  rt        d| d      t        |v r	|t           }|j                  dd      }|r't        j                  t
        j                  |||      S t        j                  ||      S a  Returns a tensor object initialized as specified by the initializer.

        Args:
            shape: Shape of the tensor.
            dtype: Optional dtype of the tensor. Only numeric or boolean dtypes
                are supported. If not specified, `keras.backend.floatx()` is
                used, which defaults to `float32` unless you configured it
                otherwise (via `keras.backend.set_floatx(float_dtype)`).
            **kwargs: Additional keyword arguments.
        z'Expected numeric or boolean dtype, got .r	   Nr   r   )_validate_kwargsr%   r&   
_get_dtypeis_numpy_compatibletfstring
ValueError_PARTITION_SHAPEr   r   call_with_layoutzerosr   r   r   r   r	   s        r   r   zZeros.__call__   s     	00&95!((ERYY,>FugQOPPv%+,EHd+))&U  xxu%%r   r(   r&   r)   r*   r+   r   r   r   r   r/   r/      s    &r   r/   zkeras.initializers.Oneszkeras.initializers.onesc                       e Zd ZdZddZy)Onesa  Initializer that generates tensors initialized to 1.

    Also available via the shortcut function `tf.keras.initializers.ones`.

    Examples:

    >>> # Standalone usage:
    >>> initializer = tf.keras.initializers.Ones()
    >>> values = initializer(shape=(2, 2))

    >>> # Usage in a TF-Keras layer:
    >>> initializer = tf.keras.initializers.Ones()
    >>> layer = tf.keras.layers.Dense(3, kernel_initializer=initializer)
    Nc                 x   t        | j                  j                  |       t        |      }|j                  r|t
        j                  k(  rt        d| d      t        |v r	|t           }|j                  dd      }|r't        j                  t
        j                  |||      S t        j                  ||      S r1   )r4   r%   r&   r5   r6   r7   r8   r9   r:   r   r   r;   onesr=   s        r   r   zOnes.__call__   s     	00&95!((ERYY,>FugQOPPv%+,EHd+))uE  wwue$$r   r(   r>   r   r   r   r@   r@      s    %r   r@   zkeras.initializers.Constantzkeras.initializers.constantc                   6    e Zd ZdZddZddZd Zed        Zy)	Constanta  Initializer that generates tensors with constant values.

    Also available via the shortcut function `tf.keras.initializers.constant`.

    Only scalar values are allowed.
    The constant value provided must be convertible to the dtype requested
    when calling the initializer.

    Examples:

    >>> # Standalone usage:
    >>> initializer = tf.keras.initializers.Constant(3.)
    >>> values = initializer(shape=(2, 2))

    >>> # Usage in a TF-Keras layer:
    >>> initializer = tf.keras.initializers.Constant(3.)
    >>> layer = tf.keras.layers.Dense(3, kernel_initializer=initializer)

    Args:
        value: A Python scalar.
    c                     || _         y r(   value)r   rG   s     r   __init__zConstant.__init__   s	    
r   Nc                 \   t        | j                  j                  |       t        |      }t        |v r	|t           }|j                  dd      }|r2t        j                  t        j                  || j                  ||      S t        j                  | j                  t        |      |      S )a  Returns a tensor object initialized to `self.value`.

        Args:
            shape: Shape of the tensor.
            dtype: Optional dtype of the tensor. If not specified,
                `keras.backend.floatx()` is used,
                which defaults to `float32` unless you configured it
                otherwise (via `keras.backend.set_floatx(float_dtype)`).
                **kwargs: Additional keyword arguments.
        r	   Nr3   )r   r   )r4   r%   r&   r5   r:   r   r   r;   r7   constantrG   r=   s        r   r   zConstant.__call__   s     	00&95!v%+,EHd+))VTZZuE  {{4::Z->eLLr   c                     d| j                   iS )NrG   rF   r   s    r   r   zConstant.get_config  s    $$r   c                     |j                  dd        d|v r.t        |d   t              rt        j                  |d         |d<    | di |S )Nr   rG   r   )r   
isinstancedictr   deserialize_keras_objectr   s     r   r   zConstant.from_config  sO    

7D!f&/40"3"L"L7O#w }V}r   )r   r(   )	r&   r)   r*   r+   rH   r   r   r,   r   r   r   r   rD   rD      s+    ,M,%  r   rD   z keras.initializers.RandomUniformz!keras.initializers.random_uniformc                   &    e Zd ZdZddZddZd Zy)RandomUniforma  Initializer that generates tensors with a uniform distribution.

    Also available via the shortcut function
    `tf.keras.initializers.random_uniform`.

    Examples:

    >>> # Standalone usage:
    >>> initializer = tf.keras.initializers.RandomUniform(minval=0., maxval=1.)
    >>> values = initializer(shape=(2, 2))

    >>> # Usage in a TF-Keras layer:
    >>> initializer = tf.keras.initializers.RandomUniform(minval=0., maxval=1.)
    >>> layer = tf.keras.layers.Dense(3, kernel_initializer=initializer)

    Args:
      minval: A python scalar or a scalar tensor. Lower bound of the range of
        random values to generate (inclusive).
      maxval: A python scalar or a scalar tensor. Upper bound of the range of
        random values to generate (exclusive).
      seed: A Python integer. Used to make the behavior of the initializer
        deterministic. Note that a seeded initializer will produce the same
        random values across multiple calls.
    Nc                 f    || _         || _        || _        t        j                  |d      | _        y N	statelessrng_type)minvalmaxvalr!   r   RandomGenerator_random_generator)r   rW   rX   r!   s       r   rH   zRandomUniform.__init__9  s/    	!(!8!8;"
r   c           	      ^   t        | j                  j                  |       t        |      }|j                  s|j
                  st        d| d      t        |v r	|t           }|j                  t        d      }|| j                          |rt        |      nd}|j                  dd      }|rMt                t        j                  | j                   j"                  ||| j$                  | j&                  ||      S | j                   j#                  || j$                  | j&                  ||      S )a  Returns a tensor object initialized as specified by the initializer.

        Args:
          shape: Shape of the tensor.
          dtype: Optional dtype of the tensor. Only floating point and integer
          types are supported. If not specified,
            `tf.keras.backend.floatx()` is used,
           which default to `float32` unless you configured it otherwise
           (via `tf.keras.backend.set_floatx(float_dtype)`).
          **kwargs: Additional keyword arguments.
        z%Expected float or integer dtype, got r2   Nr	   )r4   r%   r&   r5   is_floating
is_integerr9   r:   get_PARTITION_OFFSETr'   hashr   _ensure_keras_seededr   r;   rZ   random_uniformrW   rX   r   r   r   r   r   noncer	   s          r   r   zRandomUniform.__call__A  s    	00&95!  )9)9DUG1MNNv%+,E!::&7># *:%&Hd+ "))&&55  %%444;;UE
 	
r   c                 J    | j                   | j                  | j                  dS )NrW   rX   r!   rf   r   s    r   r   zRandomUniform.get_configi  s    ++diiPPr   )g皙?Nr(   r&   r)   r*   r+   rH   r   r   r   r   r   rQ   rQ     s    2
&
PQr   rQ   zkeras.initializers.RandomNormalz keras.initializers.random_normalc                   &    e Zd ZdZddZddZd Zy)RandomNormala  Initializer that generates tensors with a normal distribution.

    Also available via the shortcut function
    `tf.keras.initializers.random_normal`.

    Examples:

    >>> # Standalone usage:
    >>> initializer = tf.keras.initializers.RandomNormal(mean=0., stddev=1.)
    >>> values = initializer(shape=(2, 2))

    >>> # Usage in a TF-Keras layer:
    >>> initializer = tf.keras.initializers.RandomNormal(mean=0., stddev=1.)
    >>> layer = tf.keras.layers.Dense(3, kernel_initializer=initializer)

    Args:
      mean: a python scalar or a scalar tensor. Mean of the random values to
        generate.
      stddev: a python scalar or a scalar tensor. Standard deviation of the
        random values to generate.
      seed: A Python integer. Used to make the behavior of the initializer
        deterministic. Note that a seeded initializer will produce the same
        random values across multiple calls.
    Nc                 f    || _         || _        || _        t        j                  |d      | _        y rS   meanstddevr!   r   rY   rZ   r   rm   rn   r!   s       r   rH   zRandomNormal.__init__  /    		!(!8!8;"
r   c           	      "   t        | j                  j                  |       t        t	        |            }t
        |v r	|t
           }|j                  t        d      }|| j                          |rt        |      nd}|j                  dd      }|rMt                t        j                  | j                  j                  ||| j                   | j"                  ||      S | j                  j                  || j                   | j"                  ||      S )a  Returns a tensor object initialized to random normal values.

        Args:
          shape: Shape of the tensor.
          dtype: Optional dtype of the tensor. Only floating point types are
            supported. If not specified, `tf.keras.backend.floatx()` is used,
            which default to `float32` unless you configured it otherwise (via
            `tf.keras.backend.set_floatx(float_dtype)`)
          **kwargs: Additional keyword arguments.
        Nr	   )r4   r%   r&   _assert_float_dtyper5   r:   r^   r_   r'   r`   r   ra   r   r;   rZ   random_normalrm   rn   rc   s          r   r   zRandomNormal.__call__  s     	00&9#Ju$56v%+,E!::&7># *:%&Hd+ "))&&44		  %%33499dkk5%
 	
r   c                 J    | j                   | j                  | j                  dS Nrm   rn   r!   rv   r   s    r   r   zRandomNormal.get_config      		T[[$))LLr           rg   Nr(   rh   r   r   r   rj   rj   m  s    2
#
JMr   rj   z"keras.initializers.TruncatedNormalz#keras.initializers.truncated_normalc                   &    e Zd ZdZddZddZd Zy)TruncatedNormala  Initializer that generates a truncated normal distribution.

    Also available via the shortcut function
    `tf.keras.initializers.truncated_normal`.

    The values generated are similar to values from a
    `tf.keras.initializers.RandomNormal` initializer except that values more
    than two standard deviations from the mean are
    discarded and re-drawn.

    Examples:

    >>> # Standalone usage:
    >>> initializer = tf.keras.initializers.TruncatedNormal(mean=0., stddev=1.)
    >>> values = initializer(shape=(2, 2))

    >>> # Usage in a TF-Keras layer:
    >>> initializer = tf.keras.initializers.TruncatedNormal(mean=0., stddev=1.)
    >>> layer = tf.keras.layers.Dense(3, kernel_initializer=initializer)

    Args:
      mean: a python scalar or a scalar tensor. Mean of the random values
        to generate.
      stddev: a python scalar or a scalar tensor. Standard deviation of the
        random values to generate before truncation.
      seed: A Python integer. Used to make the behavior of the initializer
        deterministic. Note that a seeded initializer will produce the same
        random values across multiple calls.
    Nc                 f    || _         || _        || _        t        j                  |d      | _        y rS   rl   ro   s       r   rH   zTruncatedNormal.__init__  rp   r   c           	      l   t        | j                  j                  |       t        t	        |            }t
        |v r	|t
           }|j                  t        d      }|| j                          |rt        |      nd}|j                  dd      }|rr| j                  j                  | j                  _        t                t        j                   | j                  j"                  ||| j$                  | j&                  ||      S | j                  j#                  || j$                  | j&                  ||      S )a  Returns a tensor initialized to random normal values (truncated).

        Args:
          shape: Shape of the tensor.
          dtype: Optional dtype of the tensor. Only floating point types are
            supported. If not specified, `tf.keras.backend.floatx()` is used,
            which default to `float32` unless you configured it otherwise (via
            `tf.keras.backend.set_floatx(float_dtype)`)
          **kwargs: Additional keyword arguments.
        Nr	   )r4   r%   r&   rr   r5   r:   r^   r_   r'   r`   r   rZ   RNG_STATEFUL	_rng_typera   r   r;   truncated_normalrm   rn   rc   s          r   r   zTruncatedNormal.__call__  s    	00&9#Ju$56v%+,E!::&7># *:%&Hd+ &&33 "", !"))&&77		  %%66499dkk5%
 	
r   c                 J    | j                   | j                  | j                  dS ru   rv   r   s    r   r   zTruncatedNormal.get_config  rw   r   rx   r(   rh   r   r   r   r{   r{     s    <
(
TMr   r{   z"keras.initializers.VarianceScalingz#keras.initializers.variance_scalingc                   4    e Zd ZdZ	 	 	 	 ddZddZd Zd Zy)	VarianceScalinga  Initializer that adapts its scale to the shape of its input tensors.

    Also available via the shortcut function
    `tf.keras.initializers.variance_scaling`.

    With `distribution="truncated_normal" or "untruncated_normal"`, samples are
    drawn from a truncated/untruncated normal distribution with a mean of zero
    and a standard deviation (after truncation, if used) `stddev = sqrt(scale /
    n)`, where `n` is:

    - number of input units in the weight tensor, if `mode="fan_in"`
    - number of output units, if `mode="fan_out"`
    - average of the numbers of input and output units, if `mode="fan_avg"`

    With `distribution="uniform"`, samples are drawn from a uniform distribution
    within `[-limit, limit]`, where `limit = sqrt(3 * scale / n)`.

    Examples:

    >>> # Standalone usage:
    >>> initializer = tf.keras.initializers.VarianceScaling(
    ... scale=0.1, mode='fan_in', distribution='uniform')
    >>> values = initializer(shape=(2, 2))

    >>> # Usage in a TF-Keras layer:
    >>> initializer = tf.keras.initializers.VarianceScaling(
    ... scale=0.1, mode='fan_in', distribution='uniform')
    >>> layer = tf.keras.layers.Dense(3, kernel_initializer=initializer)

    Args:
        scale: Scaling factor (positive float).
        mode: One of `"fan_in"`, `"fan_out"`, `"fan_avg"`.
        distribution: Random distribution to use. One of `"truncated_normal"`,
            `"untruncated_normal"`, or `"uniform"`.
        seed: A Python integer. Used to make the behavior of the initializer
            deterministic. Note that a seeded initializer will produce the same
            random values across multiple calls.
    Nc                 2   |dk  rt        d| d      h d}||vrt        d| d| d      |j                         }|dk(  rd}h d	}||vrt        d
| d| d      || _        || _        || _        || _        t        j                  |d      | _        y )Nry   z0`scale` must be positive float. Received: scale=r2   >   fan_infan_avgfan_outzInvalid `mode` argument: z. Please use one of the normalr   >   uniformr   untruncated_normalz!Invalid `distribution` argument: z.Allowed distributions: rT   rU   )	r9   lowerscalemodedistributionr!   r   rY   rZ   )r   r   r   r   r!   allowed_modesallowed_distributionss          r   rH   zVarianceScaling.__init__B  s     C<B5'K  9}$+D6 2))6q:  $))+8#-L!

 443L> B**?)@C  
	(	!(!8!8;"
r   c                    t        | j                  j                  |       t        t	        |            }t
        |v r	|t
           }|j                  t        d      }|| j                          |rt        |      nd}|j                  dd      }|r.t                t        j                  | j                  ||||      S | j                  |||      S )a  Returns a tensor object initialized as specified by the initializer.

        Args:
          shape: Shape of the tensor.
          dtype: Optional dtype of the tensor. Only floating point types are
            supported. If not specified, `tf.keras.backend.floatx()` is used,
            which default to `float32` unless you configured it otherwise (via
            `tf.keras.backend.set_floatx(float_dtype)`)
          **kwargs: Additional keyword arguments.
        Nr	   )r   r   rd   )r4   r%   r&   rr   r5   r:   r^   r_   r'   r`   r   ra   r   r;   _generate_init_valrc   s          r   r   zVarianceScaling.__call__i  s     	00&9#Ju$56v%+,E!::&7># *:%&Hd+ "))''  &&U%u&MMr   c                 ^   | j                   }t        |      \  }}| j                  dk(  r|t        d|      z  }n4| j                  dk(  r|t        d|      z  }n|t        d||z   dz        z  }| j                  dk(  r7t        j                  |      dz  }| j                  j                  |d|||      S | j                  dk(  r4t        j                  |      }| j                  j                  |d|||      S t        j                  d	|z        }| j                  j                  || |||      S )
Nr         ?r          @r   g۶%?ry   r   g      @)r   _compute_fansr   maxr   mathsqrtrZ   r   rs   rb   )	r   r   r   rd   r   r   r   rn   limits	            r   r   z"VarianceScaling._generate_init_val  s4   

'.99 Sf%%EYY)#Sg&&ESv/3677E 22 YYu%(;;F))::sFE5  "66YYu%F))77sFE5  IIcEk*E))88vueU r   c                 `    | j                   | j                  | j                  | j                  dS )Nr   r   r   r!   r   r   s    r   r   zVarianceScaling.get_config  s*    ZZII --II	
 	
r   )r   r   r   Nr(   r&   r)   r*   r+   rH   r   r   r   r   r   r   r   r     s-    %R '%
NNB6
r   r   zkeras.initializers.Orthogonalzkeras.initializers.orthogonalc                   ,    e Zd ZdZddZddZd Zd Zy)	
Orthogonala  Initializer that generates an orthogonal matrix.

    Also available via the shortcut function `tf.keras.initializers.orthogonal`.

    If the shape of the tensor to initialize is two-dimensional, it is
    initialized with an orthogonal matrix obtained from the QR decomposition of
    a matrix of random numbers drawn from a normal distribution. If the matrix
    has fewer rows than columns then the output will have orthogonal rows.
    Otherwise, the output will have orthogonal columns.

    If the shape of the tensor to initialize is more than two-dimensional,
    a matrix of shape `(shape[0] * ... * shape[n - 2], shape[n - 1])`
    is initialized, where `n` is the length of the shape vector.
    The matrix is subsequently reshaped to give a tensor of the desired shape.

    Examples:

    >>> # Standalone usage:
    >>> initializer = tf.keras.initializers.Orthogonal()
    >>> values = initializer(shape=(2, 2))

    >>> # Usage in a TF-Keras layer:
    >>> initializer = tf.keras.initializers.Orthogonal()
    >>> layer = tf.keras.layers.Dense(3, kernel_initializer=initializer)

    Args:
      gain: multiplicative factor to apply to the orthogonal matrix
      seed: A Python integer. Used to make the behavior of the initializer
        deterministic. Note that a seeded initializer will produce the same
        random values across multiple calls.

    References:
      - [Saxe et al., 2014](https://openreview.net/forum?id=_wzZwKpTDF_9C)
    Nc                 X    || _         || _        t        j                  |d      | _        y rS   )gainr!   r   rY   rZ   )r   r   r!   s      r   rH   zOrthogonal.__init__  s(    		!(!8!8;"
r   c                    t        | j                  j                  |d       t        t	        |            }t        |      dk  rt        d| dt        |       d      | j                          |j                  dd      }|r-t                t        j                  | j                  |||	      S | j                  ||      S )
a  Returns a tensor object initialized to an orthogonal matrix.

        Args:
          shape: Shape of the tensor.
          dtype: Optional dtype of the tensor. Only floating point types are
            supported. If not specified, `tf.keras.backend.floatx()` is used,
           which default to `float32` unless you configured it otherwise
           (via `tf.keras.backend.set_floatx(float_dtype)`)
          **kwargs: Additional keyword arguments.
        Fsupport_partition   zKThe tensor to initialize must be at least two-dimensional. Received: shape=	 of rank r2   r	   Nr3   )r4   r%   r&   rr   r5   lenr9   r'   r   ra   r   r;   r   r=   s        r   r   zOrthogonal.__call__  s     	NN##Vu	
 $Ju$56u:>yUA7 
 	Hd+ "))''uE  &&ue44r   c                    d}|d d D ]  }||z  }	 |d   }t        ||      t        ||      f}| j                  j                  ||      }t        j
                  j                  |d      \  }}	t        j
                  j                  |	      }
|t	        j                  |
      z  }||k  rt        j
                  j                  |      }| j                  t	        j                  ||      z  S )N   r   F)full_matrices)r   minrZ   rs   r7   linalgqrtensor_diag_partsignmatrix_transposer   reshape)r   r   r   num_rowsdimnum_cols
flat_shapeaqrds              r   r   zOrthogonal._generate_init_val  s     ": 	COH	9(H-s8X/FG
 ""0050Iyy||AU|31II&&q)	RWWQZh		**1-Ayy2::a///r   c                 4    | j                   | j                  dS )Nr   r!   r   r   s    r   r   zOrthogonal.get_config  s    		49955r   )r   Nr(   r   r   r   r   r   r     s    !F
5>0(6r   r   zkeras.initializers.Identityzkeras.initializers.identityc                   ,    e Zd ZdZddZddZd Zd Zy)	Identitya3  Initializer that generates the identity matrix.

    Also available via the shortcut function `tf.keras.initializers.identity`.

    Only usable for generating 2D matrices.

    Examples:

    >>> # Standalone usage:
    >>> initializer = tf.keras.initializers.Identity()
    >>> values = initializer(shape=(2, 2))

    >>> # Usage in a TF-Keras layer:
    >>> initializer = tf.keras.initializers.Identity()
    >>> layer = tf.keras.layers.Dense(3, kernel_initializer=initializer)

    Args:
      gain: Multiplicative factor to apply to the identity matrix.
    c                     || _         y r(   r   )r   r   s     r   rH   zIdentity.__init__+  s	    	r   Nc                 R   t        | j                  j                  |d       t        t	        |            }t        |      dk7  rt        d| dt        |       d      |j                  dd      }|r#t        j                  | j                  |||	      S | j                  ||      S )
a  Returns a tensor object initialized to a 2D identity matrix.

        Args:
          shape: Shape of the tensor. It should have exactly rank 2.
          dtype: Optional dtype of the tensor. Only floating point types are
           supported. If not specified, `tf.keras.backend.floatx()` is used,
           which default to `float32` unless you configured it otherwise
           (via `tf.keras.backend.set_floatx(float_dtype)`)
          **kwargs: Additional keyword arguments.
        Fr   r   zNIdentity matrix initializer can only be used for 2D matrices. Received: shape=r   r2   r	   Nr3   )r4   r%   r&   rr   r5   r   r9   r   r   r;   r   r=   s        r   r   zIdentity.__call__.  s     	NN##Vu	
 $Ju$56u:?##('3u:,aA  Hd+))''uE  &&ue44r   c                 J    t        j                  |d|i}| j                  |z  S )Nr   )r7   eyer   )r   r   r   initializers       r   r   zIdentity._generate_init_valI  s$    ffe151yy;&&r   c                     d| j                   iS )Nr   r   r   s    r   r   zIdentity.get_configM      		""r   )r   r(   r   r   r   r   r   r     s    (56'#r   r   z keras.initializers.GlorotUniformz!keras.initializers.glorot_uniformc                   *     e Zd ZdZd fd	Zd Z xZS )GlorotUniformaj  The Glorot uniform initializer, also called Xavier uniform initializer.

    Also available via the shortcut function
    `tf.keras.initializers.glorot_uniform`.

    Draws samples from a uniform distribution within `[-limit, limit]`, where
    `limit = sqrt(6 / (fan_in + fan_out))` (`fan_in` is the number of input
    units in the weight tensor and `fan_out` is the number of output units).

    Examples:

    >>> # Standalone usage:
    >>> initializer = tf.keras.initializers.GlorotUniform()
    >>> values = initializer(shape=(2, 2))

    >>> # Usage in a TF-Keras layer:
    >>> initializer = tf.keras.initializers.GlorotUniform()
    >>> layer = tf.keras.layers.Dense(3, kernel_initializer=initializer)

    Args:
      seed: A Python integer. Used to make the behavior of the initializer
        deterministic. Note that a seeded initializer will not produce the same
        random values across multiple calls, but multiple initializers will
        produce the same sequence when constructed with the same seed value.

    References:
      - [Glorot et al., 2010](http://proceedings.mlr.press/v9/glorot10a.html)
    c                 ,    t         |   ddd|       y )Nr   r   r   r   superrH   r   r!   r%   s     r   rH   zGlorotUniform.__init__t  s    IID 	 	
r   c                     d| j                   iS Nr!   r!   r   s    r   r   zGlorotUniform.get_configy  r   r   r(   r&   r)   r*   r+   rH   r   __classcell__r%   s   @r   r   r   Q  s    :

#r   r   zkeras.initializers.GlorotNormalz keras.initializers.glorot_normalc                   *     e Zd ZdZd fd	Zd Z xZS )GlorotNormala  The Glorot normal initializer, also called Xavier normal initializer.

    Also available via the shortcut function
    `tf.keras.initializers.glorot_normal`.

    Draws samples from a truncated normal distribution centered on 0 with
    `stddev = sqrt(2 / (fan_in + fan_out))` where `fan_in` is the number of
    input units in the weight tensor and `fan_out` is the number of output units
    in the weight tensor.

    Examples:

    >>> # Standalone usage:
    >>> initializer = tf.keras.initializers.GlorotNormal()
    >>> values = initializer(shape=(2, 2))

    >>> # Usage in a TF-Keras layer:
    >>> initializer = tf.keras.initializers.GlorotNormal()
    >>> layer = tf.keras.layers.Dense(3, kernel_initializer=initializer)

    Args:
      seed: A Python integer. Used to make the behavior of the initializer
        deterministic. Note that a seeded initializer will not produce the same
        random values across multiple calls, but multiple initializers will
        produce the same sequence when constructed with the same seed value.

    References:
      - [Glorot et al., 2010](http://proceedings.mlr.press/v9/glorot10a.html)
    c                 ,    t         |   ddd|       y )Nr   r   r   r   r   r   s     r   rH   zGlorotNormal.__init__  s!    +	 	 	
r   c                     d| j                   iS r   r   r   s    r   r   zGlorotNormal.get_config  r   r   r(   r   r   s   @r   r   r   }  s    <
#r   r   zkeras.initializers.LecunNormalzkeras.initializers.lecun_normalc                   *     e Zd ZdZd fd	Zd Z xZS )LecunNormala  Lecun normal initializer.

     Also available via the shortcut function
    `tf.keras.initializers.lecun_normal`.

    Initializers allow you to pre-specify an initialization strategy, encoded in
    the Initializer object, without knowing the shape and dtype of the variable
    being initialized.

    Draws samples from a truncated normal distribution centered on 0 with
    `stddev = sqrt(1 / fan_in)` where `fan_in` is the number of input units in
    the weight tensor.

    Examples:

    >>> # Standalone usage:
    >>> initializer = tf.keras.initializers.LecunNormal()
    >>> values = initializer(shape=(2, 2))

    >>> # Usage in a TF-Keras layer:
    >>> initializer = tf.keras.initializers.LecunNormal()
    >>> layer = tf.keras.layers.Dense(3, kernel_initializer=initializer)

    Args:
      seed: A Python integer. Used to make the behavior of the initializer
        deterministic. Note that a seeded initializer will not produce the same
        random values across multiple calls, but multiple initializers will
        produce the same sequence when constructed with the same seed value.

    References:
      - [Klambauer et al., 2017](https://arxiv.org/abs/1706.02515)
    c                 ,    t         |   ddd|       y )Nr   r   r   r   r   r   s     r   rH   zLecunNormal.__init__      H3ED 	 	
r   c                     d| j                   iS r   r   r   s    r   r   zLecunNormal.get_config  r   r   r(   r   r   s   @r   r   r     s    B

#r   r   zkeras.initializers.LecunUniformz keras.initializers.lecun_uniformc                   *     e Zd ZdZd fd	Zd Z xZS )LecunUniforma  Lecun uniform initializer.

     Also available via the shortcut function
    `tf.keras.initializers.lecun_uniform`.

    Draws samples from a uniform distribution within `[-limit, limit]`, where
    `limit = sqrt(3 / fan_in)` (`fan_in` is the number of input units in the
    weight tensor).

    Examples:

    >>> # Standalone usage:
    >>> initializer = tf.keras.initializers.LecunUniform()
    >>> values = initializer(shape=(2, 2))

    >>> # Usage in a TF-Keras layer:
    >>> initializer = tf.keras.initializers.LecunUniform()
    >>> layer = tf.keras.layers.Dense(3, kernel_initializer=initializer)

    Args:
      seed: A Python integer. Used to make the behavior of the initializer
        deterministic. Note that a seeded initializer will not produce the same
        random values across multiple calls, but multiple initializers will
        produce the same sequence when constructed with the same seed value.

    References:
      - [Klambauer et al., 2017](https://arxiv.org/abs/1706.02515)
    c                 ,    t         |   ddd|       y )Nr   r   r   r   r   r   s     r   rH   zLecunUniform.__init__      H94 	 	
r   c                     d| j                   iS r   r   r   s    r   r   zLecunUniform.get_config  r   r   r(   r   r   s   @r   r   r         :

#r   r   zkeras.initializers.HeNormalzkeras.initializers.he_normalc                   *     e Zd ZdZd fd	Zd Z xZS )HeNormala  He normal initializer.

     Also available via the shortcut function
    `tf.keras.initializers.he_normal`.

    It draws samples from a truncated normal distribution centered on 0 with
    `stddev = sqrt(2 / fan_in)` where `fan_in` is the number of input units in
    the weight tensor.

    Examples:

    >>> # Standalone usage:
    >>> initializer = tf.keras.initializers.HeNormal()
    >>> values = initializer(shape=(2, 2))

    >>> # Usage in a TF-Keras layer:
    >>> initializer = tf.keras.initializers.HeNormal()
    >>> layer = tf.keras.layers.Dense(3, kernel_initializer=initializer)

    Args:
      seed: A Python integer. Used to make the behavior of the initializer
        deterministic. Note that a seeded initializer will not produce the same
        random values across multiple calls, but multiple initializers will
        produce the same sequence when constructed with the same seed value.

    References:
      - [He et al., 2015](https://arxiv.org/abs/1502.01852)
    c                 ,    t         |   ddd|       y )Nr   r   r   r   r   r   s     r   rH   zHeNormal.__init__$  r   r   c                     d| j                   iS r   r   r   s    r   r   zHeNormal.get_config)  r   r   r(   r   r   s   @r   r   r     r   r   r   zkeras.initializers.HeUniformzkeras.initializers.he_uniformc                   *     e Zd ZdZd fd	Zd Z xZS )	HeUniforma  He uniform variance scaling initializer.

     Also available via the shortcut function
    `tf.keras.initializers.he_uniform`.

    Draws samples from a uniform distribution within `[-limit, limit]`, where
    `limit = sqrt(6 / fan_in)` (`fan_in` is the number of input units in the
    weight tensor).

    Examples:

    >>> # Standalone usage:
    >>> initializer = tf.keras.initializers.HeUniform()
    >>> values = initializer(shape=(2, 2))

    >>> # Usage in a TF-Keras layer:
    >>> initializer = tf.keras.initializers.HeUniform()
    >>> layer = tf.keras.layers.Dense(3, kernel_initializer=initializer)

    Args:
      seed: A Python integer. Used to make the behavior of the initializer
        deterministic. Note that a seeded initializer will not produce the same
        random values across multiple calls, but multiple initializers will
        produce the same sequence when constructed with the same seed value.

    References:
      - [He et al., 2015](https://arxiv.org/abs/1502.01852)
    c                 ,    t         |   ddd|       y )Nr   r   r   r   r   r   s     r   rH   zHeUniform.__init__N  r   r   c                     d| j                   iS r   r   r   s    r   r   zHeUniform.get_configS  r   r   r(   r   r   s   @r   r   r   -  r   r   r   c                 X    | t        j                         } t        j                  |       S r(   )r   floatxr7   as_dtyper   s    r   r5   r5   W  s"    } ;;ur   c                 f    t        j                  |       } | j                  st        d|  d      | S )a	  Validate and return floating point type based on `dtype`.

    `dtype` must be a floating point type.

    Args:
      dtype: The data type to validate.

    Returns:
      Validated type.

    Raises:
      ValueError: if `dtype` is not a floating point type.
    z"Expected floating point type, got r2   )r7   r   r\   r9   r   s    r   rr   rr   ]  s5     KKE=eWAFGGLr   c                     t        |       dk  rdx}}nPt        |       dk(  r| d   x}}n:t        |       dk(  r| d   }| d   }n!d}| dd D ]  }||z  }	 | d   |z  }| d   |z  }t        |      t        |      fS )zComputes the number of input and output units for a weight shape.

    Args:
      shape: Integer shape tuple or TF tensor shape.

    Returns:
      A tuple of integer scalars (fan_in, fan_out).
    r   r   r   Nr   )r   int)r   r   r   receptive_field_sizer   s        r   r   r   q  s     5zA~	Uq 8#	Uqq(  !": 	(C C' 	(r11)22v;G$$r   c                     |D cg c]  }|t         vs| }}|rt        d| dt          d      |st        |v st        |v rt	        |  d      y y c c}w )NzUnknown keyword arguments: z. Allowed keyword arguments: r2   z9 initializer doesn't support partition-related arguments.)_ALLOWED_INITIALIZER_KWARGS	TypeErrorr:   r_   r9   )cls_namer   r   kinvalid_kwargss        r   r4   r4     s    !'PA14O+OaPNP).)9 :""=!>aA
 	
 F"&76&Aj + +
 	
 'B  Qs
   AAc                  P    t        t        j                  dd      st        d      y)a  Make sure the keras.backend global seed generator is set.

    This is important for DTensor use case to ensure that each client are
    initialized with same seed for tf.random.Generator, so that the value
    created are in sync among all the clients.
    	generatorNzWhen using DTensor APIs, you need to set the global seed before using any TF-Keras initializers. Please make sure to call `tf.keras.utils.set_random_seed()` in your code.)r"   r   _SEED_GENERATORr9   r   r   r   ra   ra     s.     7**K>G
 	
 ?r   )T)(r+   r   r#   tensorflow.compat.v2compatv2r7   tf_keras.srcr   tf_keras.src.dtensorr   tf_keras.src.savingr    tensorflow.python.util.tf_exportr   r:   r_   _LAYOUTr   r   r/   r@   rD   rQ   rj   r{   r   r   r   r   r   r   r   r   r   r5   rr   r   r4   ra   r   r   r   <module>r     s       ! !   & 1 :$ & 
/1BGL  ./^ ^ 0^B (*DL&&K && M&&R ')BrJ&%; &% K&%R !#@R;{ ;;| &'	
KQK KQ
KQ\ %'IbHM; HMHMV ()	
RMk RM
RMj ()	
Q
k Q

Q
h #%D_6 _6_6D !#@R8#{ 8#8#v &'	
$#O $#
$#N %'Ib(#? (#(#V $&GB(#/ (#(#V %'Ib$#? $#$#N !#Ab$# $#$#N "$C$# $#$#N(%6
 
r   