
"""Weight Normalization from https://arxiv.org/abs/1602.07868."""

from typing import Any, TypeVar

from typing_extensions import deprecated

from torch import _weight_norm, norm_except_dim
from torch.nn.modules import Module
from torch.nn.parameter import Parameter, UninitializedParameter


__all__ = ["WeightNorm", "weight_norm", "remove_weight_norm"]


class WeightNorm:
    name: str
    dim: int

    def __init__(self, name: str, dim: int) -> None:
        if dim is None:
            dim = -1
        self.name = name
        self.dim = dim

    def compute_weight(self, module: Module) -> Any:
        # recombine the magnitude (``name_g``) and direction (``name_v``) tensors
        g = getattr(module, self.name + "_g")
        v = getattr(module, self.name + "_v")
        return _weight_norm(v, g, self.dim)

    @staticmethod
    @deprecated(
        "`torch.nn.utils.weight_norm` is deprecated "
        "in favor of `torch.nn.utils.parametrizations.weight_norm`.",
        category=FutureWarning,
    )
    def apply(module, name: str, dim: int) -> "WeightNorm":
        for hook in module._forward_pre_hooks.values():
            if isinstance(hook, WeightNorm) and hook.name == name:
                raise RuntimeError(
                    f"Cannot register two weight_norm hooks on the same parameter {name}"
                )

        if dim is None:
            dim = -1

        fn = WeightNorm(name, dim)

        weight = getattr(module, name)
        if isinstance(weight, UninitializedParameter):
            raise ValueError(
                "The module passed to `WeightNorm` can't have uninitialized parameters. "
                "Make sure to run the dummy forward before applying weight normalization"
            )

        # remove w from the parameter list
        del module._parameters[name]

        # add g and v as new parameters and express w as g/||v|| * v
        module.register_parameter(
            name + "_g", Parameter(norm_except_dim(weight, 2, dim).data)
        )
        module.register_parameter(name + "_v", Parameter(weight.data))
        setattr(module, name, fn.compute_weight(module))

        # recompute weight before every forward()
        module.register_forward_pre_hook(fn)

        return fn

    def remove(self, module: Module) -> None:
        weight = self.compute_weight(module)
        delattr(module, self.name)
        del module._parameters[self.name + "_g"]
        del module._parameters[self.name + "_v"]
        setattr(module, self.name, Parameter(weight.data))

    def __call__(self, module: Module, inputs: Any) -> None:
        setattr(module, self.name, self.compute_weight(module))


T_module = TypeVar("T_module", bound=Module)


def weight_norm(module: T_module, name: str = "weight", dim: int = 0) -> T_module:
    r"""Apply weight normalization to a parameter in the given module.

.. math::
     \mathbf{w} = g \dfrac{\mathbf{v}}{\|\mathbf{v}\|}

Weight normalization is a reparameterization that decouples the magnitude
of a weight tensor from its direction. This replaces the parameter specified
by :attr:`name` (e.g. ``'weight'``) with two parameters: one specifying the magnitude
(e.g. ``'weight_g'``) and one specifying the direction (e.g. ``'weight_v'``).
Weight normalization is implemented via a hook that recomputes the weight
tensor from the magnitude and direction before every :meth:`~Module.forward`
call.

By default, with ``dim=0``, the norm is computed independently per output
channel/plane. To compute a norm over the entire weight tensor, use
``dim=None``.
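
For illustration only (a small sketch that is not part of the original
documentation), the decomposition can be checked directly on a linear layer
with the default ``dim=0``; passing ``dim=None`` instead computes a single
norm over the whole tensor::

    >>> import torch
    >>> import torch.nn as nn
    >>> m = weight_norm(nn.Linear(20, 40), name='weight', dim=0)
    >>> # with dim=0 on a 2D weight, the norm is taken along dim 1 (one norm per output row)
    >>> torch.allclose(m.weight, m.weight_g * m.weight_v / m.weight_v.norm(dim=1, keepdim=True))
    True
    >>> m_all = weight_norm(nn.Linear(20, 40), name='weight', dim=None)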

See https://arxiv.org/abs/1602.07868

.. warning::

    This function is deprecated.  Use :func:`torch.nn.utils.parametrizations.weight_norm`
    which uses the modern parametrization API.  The new ``weight_norm`` is compatible
    with ``state_dict`` generated from old ``weight_norm``.

    Migration guide:

    * The magnitude (``weight_g``) and direction (``weight_v``) are now expressed
      as ``parametrizations.weight.original0`` and ``parametrizations.weight.original1``
      respectively.  If this is bothering you, please comment on
      https://github.com/pytorch/pytorch/issues/102999

    * To remove the weight normalization reparametrization, use
      :func:`torch.nn.utils.parametrize.remove_parametrizations`.

    * The weight is no longer recomputed once at module forward; instead, it will
      be recomputed on every access.  To restore the old behavior, use
      :func:`torch.nn.utils.parametrize.cached` before invoking the module
      in question.
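
    As an illustration of this migration (a rough sketch, not an official
    snippet; it assumes ``torch.nn.utils.parametrizations.weight_norm`` is
    available), the example below would look roughly like this with the new
    API::

        >>> from torch.nn.utils.parametrizations import weight_norm as pw_norm
        >>> m = pw_norm(nn.Linear(20, 40), name='weight')
        >>> m.parametrizations.weight.original0.size()  # magnitude, formerly weight_g
        torch.Size([40, 1])
        >>> m.parametrizations.weight.original1.size()  # direction, formerly weight_v
        torch.Size([40, 20])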

Args:
    module (Module): containing module
    name (str, optional): name of weight parameter
    dim (int, optional): dimension over which to compute the norm

Returns:
    The original module with the weight norm hook

Example::

    >>> m = weight_norm(nn.Linear(20, 40), name='weight')
    >>> m
    Linear(in_features=20, out_features=40, bias=True)
    >>> m.weight_g.size()
    torch.Size([40, 1])
    >>> m.weight_v.size()
    torch.Size([40, 20])

    """
    WeightNorm.apply(module, name, dim)
    return module


def remove_weight_norm(module: T_module, name: str = "weight") -> T_module:
    r"""Remove the weight normalization reparameterization from a module.

Args:
    module (Module): containing module
    name (str, optional): name of weight parameter

Example:
    >>> m = weight_norm(nn.Linear(20, 40))
    >>> remove_weight_norm(m)
zweight_norm of 'z' not found in )r'   itemsr)   r   r   r8   r+   )r   r   kr1   s       r   r   r      sq     ,,224dJ''DII,=KK))!,M	 5 'v_VHE
FFr   N)r3   r   )r3   )__doc__typingr   r   typing_extensionsr   torchr   r   torch.nn.modulesr   torch.nn.parameterr	   r
   __all__r   rI   rB   rD   r   r   rH   r   r   <module>rV      sy    B  ( / # @ >?@ ?@D :V,= = =S = =@Gx Gs G( Gr   