import torch
import torch.nn.functional as F
from torch import nn

from ..utils import deprecate
from ..utils.import_utils import is_torch_npu_available, is_torch_version


if is_torch_npu_available():
    import torch_npu

ACT2CLS = {
    "swish": nn.SiLU,
    "silu": nn.SiLU,
    "mish": nn.Mish,
    "gelu": nn.GELU,
    "relu": nn.ReLU,
}


def get_activation(act_fn: str) -> nn.Module:
    """Helper function to get activation function from string.

    Args:
        act_fn (str): Name of activation function.

    Returns:
        nn.Module: Activation function.
    """

    act_fn = act_fn.lower()
    if act_fn in ACT2CLS:
        return ACT2CLS[act_fn]()
    else:
        raise ValueError(f"activation function {act_fn} not found in ACT2FN mapping {list(ACT2CLS.keys())}")
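
# Usage sketch (added; not part of the upstream file): get_activation lowercases
# the name and instantiates the matching entry from ACT2CLS, raising a ValueError
# that lists the supported keys for anything else, e.g.
#
#     act = get_activation("silu")        # -> nn.SiLU()
#     out = act(torch.randn(2, 8))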


class FP32SiLU(nn.Module):
    r"""
    SiLU activation function with input upcasted to torch.float32.
    """

    def __init__(self):
        super().__init__()

    def forward(self, inputs: torch.Tensor) -> torch.Tensor:
        return F.silu(inputs.float(), inplace=False).to(inputs.dtype)


class GELU(nn.Module):
    r"""
    GELU activation function with tanh approximation support with `approximate="tanh"`.

    Parameters:
        dim_in (`int`): The number of channels in the input.
        dim_out (`int`): The number of channels in the output.
        approximate (`str`, *optional*, defaults to `"none"`): If `"tanh"`, use tanh approximation.
        bias (`bool`, defaults to True): Whether to use a bias in the linear layer.
    """

    def __init__(self, dim_in: int, dim_out: int, approximate: str = "none", bias: bool = True):
        super().__init__()
        self.proj = nn.Linear(dim_in, dim_out, bias=bias)
        self.approximate = approximate

    def gelu(self, gate: torch.Tensor) -> torch.Tensor:
        if gate.device.type == "mps" and is_torch_version("<", "2.0.0"):
            return F.gelu(gate.to(dtype=torch.float32), approximate=self.approximate).to(dtype=gate.dtype)
        return F.gelu(gate, approximate=self.approximate)

    def forward(self, hidden_states):
        hidden_states = self.proj(hidden_states)
        hidden_states = self.gelu(hidden_states)
        return hidden_states
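
# Shape sketch (added, illustrative): GELU here is a Linear projection followed by
# F.gelu, so only the last dimension changes, from dim_in to dim_out:
#
#     gelu = GELU(dim_in=320, dim_out=1280, approximate="tanh")
#     out = gelu(torch.randn(2, 77, 320))   # -> shape (2, 77, 1280)
#
# The float32 round-trip in `gelu()` only kicks in on MPS with torch < 2.0,
# where half-precision gelu is not supported.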


class GEGLU(nn.Module):
    r"""
    A [variant](https://huggingface.co/papers/2002.05202) of the gated linear unit activation function.

    Parameters:
        dim_in (`int`): The number of channels in the input.
        dim_out (`int`): The number of channels in the output.
        bias (`bool`, defaults to True): Whether to use a bias in the linear layer.
    """

    def __init__(self, dim_in: int, dim_out: int, bias: bool = True):
        super().__init__()
        self.proj = nn.Linear(dim_in, dim_out * 2, bias=bias)

    def gelu(self, gate: torch.Tensor) -> torch.Tensor:
        if gate.device.type == "mps" and is_torch_version("<", "2.0.0"):
            return F.gelu(gate.to(dtype=torch.float32)).to(dtype=gate.dtype)
        return F.gelu(gate)

    def forward(self, hidden_states, *args, **kwargs):
        if len(args) > 0 or kwargs.get("scale", None) is not None:
            deprecation_message = "The `scale` argument is deprecated and will be ignored. Please remove it, as passing it will raise an error in the future. `scale` should directly be passed while calling the underlying pipeline component i.e., via `cross_attention_kwargs`."
            deprecate("scale", "1.0.0", deprecation_message)
        hidden_states = self.proj(hidden_states)
        if is_torch_npu_available():
            return torch_npu.npu_geglu(hidden_states, dim=-1, approximate=1)[0]
        hidden_states, gate = hidden_states.chunk(2, dim=-1)
        return hidden_states * self.gelu(gate)
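
# Shape sketch (added, illustrative): GEGLU projects to 2 * dim_out, splits the
# result into a value half and a gate half, and returns value * GELU(gate):
#
#     geglu = GEGLU(dim_in=320, dim_out=1280)
#     out = geglu(torch.randn(2, 77, 320))  # -> shape (2, 77, 1280)
#
# On Ascend NPU devices the fused torch_npu.npu_geglu kernel replaces the explicit
# chunk-and-multiply; the extra *args/**kwargs in forward() only exist to emit a
# deprecation warning for the removed `scale` argument.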


class SwiGLU(nn.Module):
    r"""
    A [variant](https://huggingface.co/papers/2002.05202) of the gated linear unit activation function. It's similar to
    `GEGLU` but uses SiLU / Swish instead of GeLU.

    Parameters:
        dim_in (`int`): The number of channels in the input.
        dim_out (`int`): The number of channels in the output.
        bias (`bool`, defaults to True): Whether to use a bias in the linear layer.
    """

    def __init__(self, dim_in: int, dim_out: int, bias: bool = True):
        super().__init__()

        self.proj = nn.Linear(dim_in, dim_out * 2, bias=bias)
        self.activation = nn.SiLU()

    def forward(self, hidden_states):
        hidden_states = self.proj(hidden_states)
        hidden_states, gate = hidden_states.chunk(2, dim=-1)
        return hidden_states * self.activation(gate)
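
# Illustrative note (added): the wiring matches GEGLU, but the gate half goes
# through nn.SiLU() (swish) instead of GELU:
#
#     swiglu = SwiGLU(dim_in=320, dim_out=1280)
#     out = swiglu(torch.randn(2, 77, 320))  # -> shape (2, 77, 1280)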


class ApproximateGELU(nn.Module):
    r"""
    The approximate form of the Gaussian Error Linear Unit (GELU). For more details, see section 2 of this
    [paper](https://huggingface.co/papers/1606.08415).

    Parameters:
        dim_in (`int`): The number of channels in the input.
        dim_out (`int`): The number of channels in the output.
        bias (`bool`, defaults to True): Whether to use a bias in the linear layer.
    """

    def __init__(self, dim_in: int, dim_out: int, bias: bool = True):
        super().__init__()
        self.proj = nn.Linear(dim_in, dim_out, bias=bias)

    def forward(self, x: torch.Tensor) -> torch.Tensor:
        x = self.proj(x)
        return x * torch.sigmoid(1.702 * x)


class LinearActivation(nn.Module):
    def __init__(self, dim_in: int, dim_out: int, bias: bool = True, activation: str = "silu"):
        super().__init__()

        self.proj = nn.Linear(dim_in, dim_out, bias=bias)
        self.activation = get_activation(activation)

    def forward(self, hidden_states):
        hidden_states = self.proj(hidden_states)
        return self.activation(hidden_states)
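
# Notes (added, illustrative):
# - ApproximateGELU evaluates the sigmoid approximation x * torch.sigmoid(1.702 * x),
#   which closely tracks the exact erf-based GELU described in the linked paper.
# - LinearActivation is simply a Linear layer followed by any activation resolvable
#   by get_activation, e.g. LinearActivation(320, 1280, activation="silu").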