
    +h3                        S SK r S SKJrJrJrJrJr  S SKrS SK	r	S SK
Js  Jr  S SKJr  S SKJrJrJrJr  SSKJrJr  SSKJr  SSKJrJr  SS	KJr  SS
KJ r   SSK!J"r"J#r#J$r$  SSK%J&r&  SSK'J(r(  SSK)J*r*  \"" 5       (       a  S SK+J,s  J-r.  Sr/OSr/\#R`                  " \15      r2Sr3S r4SS jr5S r6S S jr7S!S jr8    S"S\\9   S\\\:\	Rv                  4      S\\\9      S\\\<      4S jjr= " S S\5      r>g)#    N)CallableDictListOptionalUnion)Image)	BertModelBertTokenizerQwen2TokenizerQwen2VLForConditionalGeneration   )MultiPipelineCallbacksPipelineCallback)VaeImageProcessor)AutoencoderKLMagvitEasyAnimateTransformer3DModel)DiffusionPipeline)FlowMatchEulerDiscreteScheduler)is_torch_xla_availableloggingreplace_example_docstring)randn_tensor)VideoProcessor   )EasyAnimatePipelineOutputTFaY  
    Examples:
        ```python
        >>> import torch
        >>> from diffusers import EasyAnimateControlPipeline
        >>> from diffusers.pipelines.easyanimate.pipeline_easyanimate_control import get_video_to_video_latent
        >>> from diffusers.utils import export_to_video, load_video

        >>> pipe = EasyAnimateControlPipeline.from_pretrained(
        ...     "alibaba-pai/EasyAnimateV5.1-12b-zh-Control-diffusers", torch_dtype=torch.bfloat16
        ... )
        >>> pipe.to("cuda")

        >>> control_video = load_video(
        ...     "https://huggingface.co/alibaba-pai/EasyAnimateV5.1-12b-zh-Control/blob/main/asset/pose.mp4"
        ... )
        >>> prompt = (
        ...     "In this sunlit outdoor garden, a beautiful woman is dressed in a knee-length, sleeveless white dress. "
        ...     "The hem of her dress gently sways with her graceful dance, much like a butterfly fluttering in the breeze. "
        ...     "Sunlight filters through the leaves, casting dappled shadows that highlight her soft features and clear eyes, "
        ...     "making her appear exceptionally elegant. It seems as if every movement she makes speaks of youth and vitality. "
        ...     "As she twirls on the grass, her dress flutters, as if the entire garden is rejoicing in her dance. "
        ...     "The colorful flowers around her sway in the gentle breeze, with roses, chrysanthemums, and lilies each "
        ...     "releasing their fragrances, creating a relaxed and joyful atmosphere."
        ... )
        >>> sample_size = (672, 384)
        >>> num_frames = 49

        >>> input_video, _, _ = get_video_to_video_latent(control_video, num_frames, sample_size)
        >>> video = pipe(
        ...     prompt,
        ...     num_frames=num_frames,
        ...     negative_prompt="Twisted body, limb deformities, text subtitles, comics, stillness, ugliness, errors, garbled text.",
        ...     height=sample_size[0],
        ...     width=sample_size[1],
        ...     control_video=input_video,
        ... ).frames[0]
        >>> export_to_video(video, "output.mp4", fps=8)
        ```
c                    [        U [        R                  5      (       aI  [        R                  R                  R                  U R                  S5      USSS9R                  S5      n O[        U [        R                  5      (       a0  U R                  US   US   45      n [        R                  " U 5      n On[        U [        R                  5      (       aD  [        R                  " U 5      R                  US   US   45      n [        R                  " U 5      n O[        S5      e[        U [        R                  5      (       d8  [        R                  " U 5      R!                  SSS5      R#                  5       S-  n U $ )	z\
def preprocess_image(image, sample_size):
    """
    Preprocess a single image (PIL.Image, numpy.ndarray, or torch.Tensor) to a resized tensor.

    Args:
        image: Input image as `PIL.Image.Image`, `numpy.ndarray` (H, W, C), or `torch.Tensor` (C, H, W).
        sample_size: Target size as a `(height, width)` pair.

    Returns:
        `torch.Tensor` of shape (C, height, width). Non-tensor inputs are converted to float
        and scaled to [0, 1]; tensor inputs are only resized (no rescaling applied).
    """
    if isinstance(image, torch.Tensor):
        # Resize tensors directly with bilinear interpolation; add/remove a batch dim for interpolate.
        image = torch.nn.functional.interpolate(
            image.unsqueeze(0), size=sample_size, mode="bilinear", align_corners=False
        ).squeeze(0)
    elif isinstance(image, Image.Image):
        # PIL's resize expects (width, height).
        image = image.resize((sample_size[1], sample_size[0]))
        image = np.array(image)
    elif isinstance(image, np.ndarray):
        # Round-trip through PIL for resizing, then back to numpy.
        image = Image.fromarray(image).resize((sample_size[1], sample_size[0]))
        image = np.array(image)
    else:
        raise ValueError("Unsupported input type. Expected PIL.Image, numpy.ndarray, or torch.Tensor.")

    # numpy HWC uint8 -> torch CHW float in [0, 1].
    if not isinstance(image, torch.Tensor):
        image = torch.from_numpy(image).permute(2, 0, 1).float() / 255.0

    return image
                  " US:  SS	5      nUR	                  S5      R	                  S
5      R                  / SQ5      R	                  S5      n[        R                  " USSU R                  5       S   SS/5      nUR                  U R                  U R                  5      nO2[        R                  " U S S 2S S24   5      nS	US S 2S S 2S S 24'   OSu  pUb-  [        XBS9nUR                  SSSS5      R	                  S5      nOS nXU4$ s  snf )N)r5   r   r   r"   r   )r   g?           )r   r   r   r"   NN)r7   r$   stackr2   r)   wheretiler   todevicedtype
zeros_like)input_video
num_framesr5   validation_video_mask	ref_imageframeinput_video_masks          r6   get_video_to_video_latentrK   x   s   U`aU`E'GU`a kk+.{
; "))!Q15??B ,$45J$]!${{+@;+NPSUXY  099!<FFrJRRS_`jjklm$zz*:Q;CSCSCUVWCXZ[]^<_`/22;3E3E{GXGXY$//ArrE0BC(+Q1W%(2%$YA	%%aAq1;;A>		)33= bs   E=c                    UnUnU u  pVXV-  nXtU-  :  a  Un[        [        XE-  U-  5      5      n	OUn	[        [        X6-  U-  5      5      n[        [        XH-
  S-  5      5      n
[        [        X9-
  S-  5      5      nX4X-   X-   44$ )Ng       @)intround)src	tgt_width
tgt_heighttwthhwrresize_heightresize_widthcrop_top	crop_lefts               r6   get_resize_crop_region_for_gridr[      s    	B	BDA	AG}5!,-E"&1*-.5",345HE2,345I 8#;Y=U"VVVr8   c                     UR                  [        [        SUR                  5      5      SS9nU R                  [        [        SU R                  5      5      SS9nXU-  -  nX%-  SU-
  U -  -   n U $ )a  
def rescale_noise_cfg(noise_cfg, noise_pred_text, guidance_rescale=0.0):
    r"""
    Rescales `noise_cfg` tensor based on `guidance_rescale` to improve image quality and fix overexposure. Based on
    Section 3.4 from [Common Diffusion Noise Schedules and Sample Steps are
    Flawed](https://huggingface.co/papers/2305.08891).

    Args:
        noise_cfg (`torch.Tensor`):
            The predicted noise tensor for the guided diffusion process.
        noise_pred_text (`torch.Tensor`):
            The predicted noise tensor for the text-guided diffusion process.
        guidance_rescale (`float`, *optional*, defaults to 0.0):
            A rescale factor applied to the noise predictions.

    Returns:
        noise_cfg (`torch.Tensor`): The rescaled noise prediction tensor.
    """
    # Per-sample std over all non-batch dimensions.
    std_text = noise_pred_text.std(dim=list(range(1, noise_pred_text.ndim)), keepdim=True)
    std_cfg = noise_cfg.std(dim=list(range(1, noise_cfg.ndim)), keepdim=True)
    # Rescale guided noise to the text prediction's scale (fixes overexposure).
    noise_pred_rescaled = noise_cfg * (std_text / std_cfg)
    # Blend rescaled and original guidance to avoid "plain looking" images.
    noise_cfg = guidance_rescale * noise_pred_rescaled + (1 - guidance_rescale) * noise_cfg
    return noise_cfg
  US'   US   S:w  aA  [        R                  " U S S 2S S 2SS 2S S 2S S 24   USSS9n[        R
                  " XV/SS9nU$ Un U$ [        USS  5      n[        R                  " XSSS9nU$ )Nr"   r   r   	trilinearFr   r]   )r   r`   Fr(   r$   cat)masklatentprocess_first_frame_onlylatent_sizetarget_sizefirst_frame_resizedremaining_frames_resizedresized_masks           r6   resize_maskrw      s   ++-K;qr?+AmmAqsAq!+UZ
 ;qr?+$Q!+Aq>Q'(}}Q12q!^$;[X]($ !99&9%TZ[\L 	 /L  ;qr?+}}T+]bcr8   num_inference_stepsrB   	timestepssigmasc                    Ub  Ub  [        S5      eUb  S[        [        R                  " U R                  5      R
                  R                  5       5      ;   nU(       d  [        SU R                   S35      eU R                  " S
X2S.UD6  U R                  n[        U5      nX14$ Ub  S[        [        R                  " U R                  5      R
                  R                  5       5      ;   nU(       d  [        SU R                   S35      eU R                  " S
XBS.UD6  U R                  n[        U5      nX14$ U R                  " U4S	U0UD6  U R                  nX14$ )a  
def retrieve_timesteps(
    scheduler,
    num_inference_steps: Optional[int] = None,
    device: Optional[Union[str, torch.device]] = None,
    timesteps: Optional[List[int]] = None,
    sigmas: Optional[List[float]] = None,
    **kwargs,
):
    r"""
    Calls the scheduler's `set_timesteps` method and retrieves timesteps from the scheduler after the call. Handles
    custom timesteps. Any kwargs will be supplied to `scheduler.set_timesteps`.

    Args:
        scheduler (`SchedulerMixin`):
            The scheduler to get timesteps from.
        num_inference_steps (`int`):
            The number of diffusion steps used when generating samples with a pre-trained model. If used, `timesteps`
            must be `None`.
        device (`str` or `torch.device`, *optional*):
            The device to which the timesteps should be moved to. If `None`, the timesteps are not moved.
        timesteps (`List[int]`, *optional*):
            Custom timesteps used to override the timestep spacing strategy of the scheduler. If `timesteps` is passed,
            `num_inference_steps` and `sigmas` must be `None`.
        sigmas (`List[float]`, *optional*):
            Custom sigmas used to override the timestep spacing strategy of the scheduler. If `sigmas` is passed,
            `num_inference_steps` and `timesteps` must be `None`.

    Returns:
        `Tuple[torch.Tensor, int]`: A tuple where the first element is the timestep schedule from the scheduler and the
        second element is the number of inference steps.
    """
    if timesteps is not None and sigmas is not None:
        raise ValueError("Only one of `timesteps` or `sigmas` can be passed. Please choose one to set custom values")
    if timesteps is not None:
        # Only schedulers whose set_timesteps accepts `timesteps` support custom schedules.
        accepts_timesteps = "timesteps" in set(inspect.signature(scheduler.set_timesteps).parameters.keys())
        if not accepts_timesteps:
            raise ValueError(
                f"The current scheduler class {scheduler.__class__}'s `set_timesteps` does not support custom"
                f" timestep schedules. Please check whether you are using the correct scheduler."
            )
        scheduler.set_timesteps(timesteps=timesteps, device=device, **kwargs)
        timesteps = scheduler.timesteps
        num_inference_steps = len(timesteps)
    elif sigmas is not None:
        accept_sigmas = "sigmas" in set(inspect.signature(scheduler.set_timesteps).parameters.keys())
        if not accept_sigmas:
            raise ValueError(
                f"The current scheduler class {scheduler.__class__}'s `set_timesteps` does not support custom"
                f" sigmas schedules. Please check whether you are using the correct scheduler."
            )
        scheduler.set_timesteps(sigmas=sigmas, device=device, **kwargs)
        timesteps = scheduler.timesteps
        num_inference_steps = len(timesteps)
    else:
        scheduler.set_timesteps(num_inference_steps, device=device, **kwargs)
        timesteps = scheduler.timesteps
    return timesteps, num_inference_steps
	 C(9(9):Q:Q(R(](](b(b(d$ee.y/B/B.C D_ `  	GvGG''	!)n )) 	 3MFMfM''	))r8   c            5       h  ^  \ rS rSrSrSr/ SQrS\S\\	\
4   S\\\4   S\S	\4
U 4S
 jjr          S<S\\\\   4   S\S\S\\\\\   4      S\\R,                     S\\R,                     S\\R,                     S\\R,                     S\\R.                     S\\R0                     S\4S jjrS r      S=S jr S>S jrS r\S 5       r\S 5       r \S  5       r!\S! 5       r"\S" 5       r#\RH                  " 5       \%" \&5      SS#S$S$SSSS%S&SSS'SSSSSSS(SSS)/S'S4S\\\\   4   S*\\   S+\\   S,\\   S-\\RN                     S.\\RN                     S/\\RN                     S0\\   S1\\(   S\\\\\   4      S\\   S2\\(   S3\\\RR                  \\RR                     4      S)\\R,                     S\\R,                     S\\R,                     S\\R,                     S\\R,                     S4\\   S5\S6\\\*\\\+/S4   \,\-4      S7\\   S8\(S9\\\      40S: jj5       5       r.S;r/U =r0$ )?EasyAnimateControlPipelinei   a  
Pipeline for text-to-video generation using EasyAnimate.

This model inherits from [`DiffusionPipeline`]. Check the superclass documentation for the generic methods the
library implements for all the pipelines (such as downloading or saving, running on a particular device, etc.)

EasyAnimate uses one text encoder [qwen2 vl](https://huggingface.co/Qwen/Qwen2-VL-7B-Instruct) in V5.1.

Args:
    vae ([`AutoencoderKLMagvit`]):
        Variational Auto-Encoder (VAE) Model to encode and decode video to and from latent representations.
    text_encoder (Optional[`~transformers.Qwen2VLForConditionalGeneration`, `~transformers.BertModel`]):
        EasyAnimate uses [qwen2 vl](https://huggingface.co/Qwen/Qwen2-VL-7B-Instruct) in V5.1.
    tokenizer (Optional[`~transformers.Qwen2Tokenizer`, `~transformers.BertTokenizer`]):
        A `Qwen2Tokenizer` or `BertTokenizer` to tokenize text.
    transformer ([`EasyAnimateTransformer3DModel`]):
        The EasyAnimate model designed by EasyAnimate Team.
    scheduler ([`FlowMatchEulerDiscreteScheduler`]):
        A scheduler to be used in combination with EasyAnimate to denoise the encoded image latents.
ztext_encoder->transformer->vae)latentsprompt_embedsnegative_prompt_embedsvaetext_encoder	tokenizertransformerr   c                   > [         TU ]  5         U R                  UUUUUS9  [        U SS 5      b   U R                  R
                  R                  OSU l        [        U SS 5      b  U R                  R                  OSU l	        [        U SS 5      b  U R                  R                  OSU l        [        U R                  S9U l        [        U R                  SSSS	9U l        [        U R                  S9U l        g )
N)r   r   r   r   r   r   Tr         )vae_scale_factorF)r   do_normalizedo_binarizedo_convert_grayscale)super__init__register_modulesgetattrr   configenable_text_attention_maskr   spatial_compression_ratiovae_spatial_compression_ratiotemporal_compression_ratiovae_temporal_compression_ratior   image_processormask_processorr   video_processor)selfr   r   r   r   r   r   s         r6   r   #EasyAnimateControlPipeline.__init__9  s     	%# 	 	
 t]D1= ##>> 	' 3:$t2L2XDHH..^_ 	* 4;43M3YDHH//_` 	+  1$BdBde/!??!%	
  .t?a?abr8   r   TNpromptnum_images_per_promptdo_classifier_free_guidancenegative_promptr   r   prompt_attention_masknegative_prompt_attention_maskrB   rC   max_sequence_lengthc           
         U
=(       d    U R                   R                  n
U	=(       d    U R                   R                  n	Ub  [        U[        5      (       a  SnO3Ub!  [        U[
        5      (       a  [        U5      nOUR                  S   nUGc  [        U[        5      (       a  SSUS./S./nOU Vs/ s H  nSSUS./S.PM     nnU Vs/ s H  oR                  R                  U/SSS	9PM      nnU R                  US
USSSSS9nUR                  U R                   R                  5      nUR                  nUR                  nU R                  (       a  U R                  UUSS9R                  S   nO[        S5      eUR!                  US5      nUR                  XS9nUR                  u  nnnUR!                  SUS5      nUR#                  UU-  US5      nUR                  U	S9nU(       Ga
  UGc  Ub   [        U[        5      (       a  SSUS./S./nOU Vs/ s H  nSSUS./S.PM     nnU Vs/ s H  oR                  R                  U/SSS	9PM      nnU R                  US
USSSSS9nUR                  U R                   R                  5      nUR                  nUR                  nU R                  (       a  U R                  UUSS9R                  S   nO[        S5      eUR!                  US5      nU(       aU  UR                  S   nUR                  XS9nUR!                  SUS5      nUR#                  X-  US5      nUR                  U	S9nXVXx4$ s  snf s  snf s  snf s  snf )as  
Encodes the prompt into text encoder hidden states.

Args:
    prompt (`str` or `List[str]`, *optional*):
        prompt to be encoded
    device: (`torch.device`):
        torch device
    dtype (`torch.dtype`):
        torch dtype
    num_images_per_prompt (`int`):
        number of images that should be generated per prompt
    do_classifier_free_guidance (`bool`):
        whether to use classifier free guidance or not
    negative_prompt (`str` or `List[str]`, *optional*):
        The prompt or prompts not to guide the image generation. If not defined, one has to pass
        `negative_prompt_embeds` instead. Ignored when not using guidance (i.e., ignored if `guidance_scale` is
        less than `1`).
    prompt_embeds (`torch.Tensor`, *optional*):
        Pre-generated text embeddings. Can be used to easily tweak text inputs, *e.g.* prompt weighting. If not
        provided, text embeddings will be generated from `prompt` input argument.
    negative_prompt_embeds (`torch.Tensor`, *optional*):
        Pre-generated negative text embeddings. Can be used to easily tweak text inputs, *e.g.* prompt
        weighting. If not provided, negative_prompt_embeds will be generated from `negative_prompt` input
        argument.
    prompt_attention_mask (`torch.Tensor`, *optional*):
        Attention mask for the prompt. Required when `prompt_embeds` is passed directly.
    negative_prompt_attention_mask (`torch.Tensor`, *optional*):
        Attention mask for the negative prompt. Required when `negative_prompt_embeds` is passed directly.
    max_sequence_length (`int`, *optional*): maximum sequence length to use for the prompt.
r   r   usertext)typer   )rolecontentFT)tokenizeadd_generation_prompt
max_lengthrightpt)r   paddingr   
truncationreturn_attention_maskpadding_sidereturn_tensors)	input_idsattention_maskoutput_hidden_stateszLLM needs attention_mask)rC   rB   r<   rB   )r   rC   rB   r#   strr`   r   shaper   apply_chat_templaterA   r   r   r   hidden_statesr0   repeatview)r   r   r   r   r   r   r   r   r   rB   rC   r   
batch_sizemessages_promptmr   text_inputstext_input_idsbs_embedseq_len__negative_prompts                          r6   encode_prompt(EasyAnimateControlPipeline.encode_prompt`  s   Z 0**0034,,33*VS"9"9JJvt$<$<VJ&,,Q/J &#&& !'-3V$D#E $*
 $* !'-3W$E#F $*   nvmuhi22A3^b2cmu   ..$.&*$# ) K &..):):)A)ABK(22N$/$>$>!.. $ 1 1,=Rim !2 !-!$ !!;<<$9$@$@AVXY$Z!%((u(D,22'1%,,Q0EqI%**86K+KWVXY 5 8 8 8 G '+A+I*z/3/O/O !'-3_$M#N -<
 -<( !'-3=M$N#O -<   nvmuhi22A3^b2cmu   ..$.&*$# ) K &..):):)A)ABK(22N-8-G-G*..)-):):,#A)- *; *  -	*$& !!;<<-K-R-RShjk-l*&,2215G%;%>%>U%>%Z"%;%B%B1F[]^%_"%;%@%@Acelnp%q"-K-N-NV\-N-]*6KkkAXs   5M%M"M'7%M,c                 n   S[        [        R                  " U R                  R                  5      R
                  R                  5       5      ;   n0 nU(       a  X$S'   S[        [        R                  " U R                  R                  5      R
                  R                  5       5      ;   nU(       a  XS'   U$ )Neta	generator)r}   r~   r   r   stepr   r   )r   r   r   accepts_etaextra_step_kwargsaccepts_generators         r6   prepare_extra_step_kwargs4EasyAnimateControlPipeline.prepare_extra_step_kwargs  s     s7#4#4T^^5H5H#I#T#T#Y#Y#[\\'*e$ (3w/@/@ATAT/U/`/`/e/e/g+hh-6k*  r8   c
           
        ^  US-  S:w  d	  US-  S:w  a  [        SU SU S35      eU	bW  [        U 4S jU	 5       5      (       d=  [        ST R                   SU	 V
s/ s H  oT R                  ;  d  M  U
PM     sn
 35      eUb  Ub  [        S	U S
U S35      eUc  Uc  [        S5      eUbA  [        U[        5      (       d,  [        U[
        5      (       d  [        S[        U5       35      eUb  Uc  [        S5      eUb  Ub  [        SU SU S35      eUb  Uc  [        S5      eUbE  UbA  UR                  UR                  :w  a&  [        SUR                   SUR                   S35      eg g g s  sn
f )N   r   z8`height` and `width` have to be divisible by 16 but are z and .c              3   @   >#    U  H  oTR                   ;   v   M     g 7fN)_callback_tensor_inputs).0kr   s     r6   	<genexpr>:EasyAnimateControlPipeline.check_inputs.<locals>.<genexpr>#  s      F
7Y!---7Ys   z2`callback_on_step_end_tensor_inputs` has to be in z, but found zCannot forward both `prompt`: z and `prompt_embeds`: z2. Please make sure to only forward one of the two.zeProvide either `prompt` or `prompt_embeds`. Cannot leave both `prompt` and `prompt_embeds` undefined.z2`prompt` has to be of type `str` or `list` but is zEMust provide `prompt_attention_mask` when specifying `prompt_embeds`.z'Cannot forward both `negative_prompt`: z and `negative_prompt_embeds`: zWMust provide `negative_prompt_attention_mask` when specifying `negative_prompt_embeds`.zu`prompt_embeds` and `negative_prompt_embeds` must have the same shape when passed directly, but got: `prompt_embeds` z != `negative_prompt_embeds` )r0   allr   r#   r   r`   r   r   )r   r   heightwidthr   r   r   r   r   "callback_on_step_end_tensor_inputsr   s   `          r6   check_inputs'EasyAnimateControlPipeline.check_inputs  s4    B;!urzQWX^W__dejdkklmnn-9# F
7YF
 C
 C
 DTEaEaDbbn  |^  pH  |^vw  ko  kG  kG  bGpq  |^  pH  oI  J  -";08N}o ^0 0  ^ 5w  FC)@)@TZ\`IaIaQRVW]R^Q_`aa$)>)Fdee&+A+M9/9J K*++]_ 
 "-2P2Xvww$)?)K""&<&B&BB --:-@-@,A B.445Q8  C *L$7 pHs   E01E0c
                    U	b  U	R                  XvS9$ UUUS-
  U R                  -  S-   X@R                  -  XPR                  -  4n
[        U[        5      (       a*  [        U5      U:w  a  [        S[        U5       SU S35      e[        XXvS9n	[        U R                  S5      (       a  XR                  R                  -  n	U	$ )NrB   rC   r   z/You have passed a list of generators of length z+, but requested an effective batch size of z@. Make sure the batch size matches the length of the generators.)r   rB   rC   init_noise_sigma)rA   r   r   r#   r`   r   r0   r   hasattrr   r   )r   r   num_channels_latentsrF   r   r   rC   rB   r   r   r   s              r6   prepare_latents*EasyAnimateControlPipeline.prepare_latentsJ  s     ::V:99  !^ C CCaG888777
 i&&3y>Z+GA#i.AQ R&<'gi 
 u&V4>>#566 ? ??Gr8   c
                    Ub  UR                  XvS9nSn
/ n[        SUR                  S   U
5       HH  nXX-    nU R                  R	                  U5      S   nUR                  5       nUR                  U5        MJ     [        R                  " USS9nXR                  R                  R                  -  nUb  UR                  XvS9nSn
/ n[        SUR                  S   U
5       HH  nX,X-    nU R                  R	                  U5      S   nUR                  5       nUR                  U5        MJ     [        R                  " USS9nUU R                  R                  R                  -  nUU4$ S nUU4$ )Nr   r   r   rl   )rA   ra   r   r   encoder    appendr$   rn   r   scaling_factor)r   controlcontrol_imager   r   r   rC   rB   r   r   bsnew_controli
control_bsnew_control_pixel_valuescontrol_pixel_values_bscontrol_image_latentss                    r6   prepare_control_latents2EasyAnimateControlPipeline.prepare_control_latentsd  s{    jjj<GBK1gmmA.3$0
!XX__Z8;
'__.
"":.	 4
 ii3G > >>G$),,F,HMB')$1m11!4b9*7AF*C'*.((//:Q*RST*U'*A*F*F*H'(//0GH	 :
 %*II.FA$N!$9DHHOO<Z<Z$Z! --- %)!---r8   c                     U R                   $ r   _guidance_scaler   s    r6   guidance_scale)EasyAnimateControlPipeline.guidance_scale  s    ###r8   c                     U R                   $ r   )_guidance_rescaler	  s    r6   re   +EasyAnimateControlPipeline.guidance_rescale  s    %%%r8   c                      U R                   S:  $ )Nr   r  r	  s    r6   r   6EasyAnimateControlPipeline.do_classifier_free_guidance  s    ##a''r8   c                     U R                   $ r   )_num_timestepsr	  s    r6   num_timesteps(EasyAnimateControlPipeline.num_timesteps  s    """r8   c                     U R                   $ r   )
_interruptr	  s    r6   	interrupt$EasyAnimateControlPipeline.interrupt  s    r8   1   i   2   g      @r:   pilr   rF   r   r   control_videocontrol_camera_videorH   rx   r
  r   r   output_typereturn_dictcallback_on_step_endr   re   ry   c                    [        U[        [        45      (       a  UR                  n[	        US-  S-  5      n[	        US-  S-  5      nU R                  UUUU
UUUUU5	        Xl        UU l        SU l        Ub  [        U[        5      (       a  SnO3Ub!  [        U[        5      (       a  [        U5      nOUR                  S   nU R                  nU R                  b  U R                  R                  nOU R                   R                  nU R#                  UUUUU R$                  U
UUUUSS9u  nnnn[        U R&                  [(        5      (       a  [+        U R&                  UUUSS9u  nnO[+        U R&                  UUU5      u  nnU R&                  R,                  nU R.                  R0                  R2                  nU R5                  UU-  UUUUUUUU5	      nUbN  [7        XnSS	9nUS
-  nU R$                  (       a  [8        R:                  " U/S-  5      OUR=                  UU5      nGOcUb  UR                  u  nnnn n!U R>                  RA                  URC                  SSSSS5      RE                  UU-  UU U!5      UUS9nUR=                  [8        RF                  S9nURE                  UUUX45      RC                  SSSSS5      nU RI                  SUUUUUUUU R$                  5	      S   nU R$                  (       a  [8        R:                  " U/S-  5      OUR=                  UU5      nOc[8        RJ                  " U5      R=                  UU5      nU R$                  (       a  [8        R:                  " U/S-  5      OUR=                  UU5      nUGbY  UR                  u  nnnn n!U R>                  RA                  URC                  SSSSS5      RE                  UU-  UU U!5      UUS9nUR=                  [8        RF                  S9nURE                  UUUX45      RC                  SSSSS5      nU RI                  SUUUUUR                  UUU R$                  5	      S   n"[8        RJ                  " U5      n#URM                  5       S   S:w  a  U"U#SS2SS2SS24'   U R$                  (       a  [8        R:                  " U#/S-  5      OU#R=                  UU5      n#[8        R:                  " UU#/SS9nOj[8        RJ                  " U5      n#U R$                  (       a  [8        R:                  " U#/S-  5      OU#R=                  UU5      n#[8        R:                  " UU#/SS9n
U RO                  X5      n$U R$                  (       a0  [8        R:                  " UU/5      n[8        R:                  " UU/5      nUR=                  US9nUR=                  US9n[        U5      XR&                  RP                  -  -
  n%[        U5      U l)        U RU                  US9 n&[W        U5       GH^  u  n'n(U RX                  (       a  M  U R$                  (       a  [8        R:                  " U/S-  5      OUn)[[        U R&                  S5      (       a  U R&                  R]                  U)U(5      n)[8        R^                  " U(/U)R                  S   -  US9R=                  U)R                  S9n*U R!                  U)U*UUSS9S   n+U+RM                  5       S   U R.                  R0                  R2                  :w  a  U+Ra                  SSS9u  n+n,U R$                  (       a  U+Ra                  S5      u  n-n.U-U	U.U--
  -  -   n+U R$                  (       a  US:  a  [c        U+W.US9n+U R&                  Rd                  " U+U(U40 U$DSS0D6S   nUb\  0 n/U H  n0[g        5       U0   U/U0'   M     U" U U'U(U/5      n1U1Ri                  SU5      nU1Ri                  SU5      nU1Ri                  SU5      nU'[        U5      S-
  :X  d)  U'S-   U%:  a0  U'S-   U R&                  RP                  -  S:X  a  U&Rk                  5         [l        (       d  GMI  [n        Rp                  " 5         GMa     SSS5        US:X  d,  U Rs                  U5      n2U Rt                  Rw                  U2US9n2OUn2U Ry                  5         U(       d  U24$ [{        U2S9$ ! , (       d  f       Ne= f)a9  
Generates images or video using the EasyAnimate pipeline based on the provided prompts.

Examples:
    prompt (`str` or `List[str]`, *optional*):
        Text prompts to guide the image or video generation. If not provided, use `prompt_embeds` instead.
    num_frames (`int`, *optional*):
        Length of the generated video (in frames).
    height (`int`, *optional*):
        Height of the generated image in pixels.
    width (`int`, *optional*):
        Width of the generated image in pixels.
    num_inference_steps (`int`, *optional*, defaults to 50):
        Number of denoising steps during generation. More steps generally yield higher quality images but slow
        down inference.
    guidance_scale (`float`, *optional*, defaults to 5.0):
        Encourages the model to align outputs with prompts. A higher value may decrease image quality.
    negative_prompt (`str` or `List[str]`, *optional*):
        Prompts indicating what to exclude in generation. If not specified, use `negative_prompt_embeds`.
    num_images_per_prompt (`int`, *optional*, defaults to 1):
        Number of images to generate for each prompt.
    eta (`float`, *optional*, defaults to 0.0):
        Corresponds to parameter eta (η) from the DDIM paper; only applies to DDIM-family schedulers and is
        ignored by others.
    generator (`torch.Generator` or `List[torch.Generator]`, *optional*):
        A generator to ensure reproducibility in image generation.
    latents (`torch.Tensor`, *optional*):
        Predefined latent tensors to condition generation.
    prompt_embeds (`torch.Tensor`, *optional*):
        Text embeddings for the prompts. Overrides prompt string inputs for more flexibility.
    negative_prompt_embeds (`torch.Tensor`, *optional*):
        Embeddings for negative prompts. Overrides string inputs if defined.
    prompt_attention_mask (`torch.Tensor`, *optional*):
        Attention mask for the primary prompt embeddings.
    negative_prompt_attention_mask (`torch.Tensor`, *optional*):
        Attention mask for negative prompt embeddings.
    output_type (`str`, *optional*, defaults to "latent"):
        Format of the generated output: `"pil"` for PIL images, `"np"` for NumPy arrays, or `"latent"` for
        the raw latent tensors.
    return_dict (`bool`, *optional*, defaults to `True`):
        If `True`, returns a structured output. Otherwise returns a simple tuple.
    callback_on_step_end (`Callable`, *optional*):
        Functions called at the end of each denoising step.
    callback_on_step_end_tensor_inputs (`List[str]`, *optional*):
        Tensor names to be included in callback function calls.
    guidance_rescale (`float`, *optional*, defaults to 0.0):
        Adjusts noise levels based on guidance scale.

Returns:
    [`~pipelines.easyanimate.pipeline_easyanimate.EasyAnimatePipelineOutput`] or `tuple`:
        If `return_dict` is `True`, an [`~pipelines.easyanimate.pipeline_easyanimate.EasyAnimatePipelineOutput`]
        is returned; otherwise a `tuple` is returned whose first element is the generated video frames.
r   FNr   r   )r   rB   rC   r   r   r   r   r   r   r   text_encoder_index)muT)rq      r"   r   r   )r   r   )rC   rl   r   )totalscale_model_input)encoder_hidden_statescontrol_latentsr  r:   )re   r  r   r   r   rp   )videor  )frames)>r#   r   r   tensor_inputsrM   r   r  r  r  r   r`   r   r   _execution_devicer   rC   r   r   r   r   r   r   ry   r   r   latent_channelsr   rw   r$   rn   rA   r   
preprocessr2   reshapefloat32r  rD   r   r   orderr  progress_bar	enumerater  r   r&  tensorchunkri   r   localspopupdateXLA_AVAILABLExm	mark_stepdecode_latentsr   postprocess_videomaybe_free_model_hooksr   )3r   r   rF   r   r   r  r  rH   rx   r
  r   r   r   r   r   r   r   r   r   r  r  r   r   re   ry   r   rB   rC   r   control_video_latentsr(  channelsheight_videowidth_videoref_image_latentsref_image_latents_conv_inr   num_warmup_stepsr2  r   tlatent_model_inputt_expand
noise_predr   noise_pred_uncondrd   callback_kwargsr   callback_outputsr)  s3                                                      r6   __call__#EasyAnimateControlPipeline.__call__  s)	   j *-=?U,VWW1E1S1S. flb()Ub[B&' 	"!*.
	
  .!1 *VS"9"9JJvt$<$<VJ&,,Q/J''(%%++E$$**E "7(,(H(H+'#9"7+I   
	
"!*  dnn&EFF-? 3VY1.*I* .@Pcekmv-w*I*NN,,	  $xx>>&&.. 

  +$/0Dhl$m!$9A$=!:>:Z:Z		01A56`ub  &JWJ]J]GJ*lK 00;;%%aAq!4<<+X|[  < M *,,5==,AM)11*j(TZbjj1aAM %)$@$@00
% 
%! ;?:Z:Z		01A56`ub  %*$4$4W$=$@$@$O!:>:Z:Z		01A56`ub   JS//GJ*lK,,77!!!Q1a088j9PRZ\hjuv 8 I
 "5==9I!))*j(FZbbcdfgijlmopqI $ < <##00
! 
! ).(8(8(A%||~a A%6G)!Q(3 33 		459:.b	 &
 $ii:S(TZ[\O(-(8(8(A% 33 		459:.b	 &
 $ii:S(TZ[\O !::9J++!II'=}&MNM$)II/MOd.e$f! &(((7 5 8 8 8 G y>,?..BVBV,VV!)n%89\!),1>> BFAaAaUYYy1}%=gn"4>>+>??)-)I)IJ\^_)`& !<<.@.F.Fq.I(IRXY\\,22 ]  "--&*7$3 % .  
 ??$Q'488??+J+JJ$.$4$4QA$4$>MJ 339C9I9I!9L6%!2^YjGj5k!kJ338H38N!2:aq!rJ ..--j!WmHYmglmnop'3&(O?-3Xa[* @';D!Q'X$.229gFG$4$8$8-$XM-=-A-ABZ\r-s*I**A9I/IqSTuX\XfXfXlXlNlpqNq '') =LLNe - :l h&''0E((::T_:`EE 	##%8O(66E :9s   Icc
c$)
r  r  r  r  r   r   r   r   r   r   )
r   TNNNNNNN   )NNNNNNr   )1__name__
__module____qualname____firstlineno____doc__model_cpu_offload_seqr   r   r   r   r	   r   r
   r   r   r   r   r   rM   boolr   r$   r%   rB   rC   r   r   r   r   r  propertyr
  re   r   r  r  no_gradr   EXAMPLE_DOC_STRINGFloatTensorr3   	Generatorr   r   r   r   rM  __static_attributes____classcell__)r   s   @r6   r   r      sV   * =T$c $c ;YFG$c 67	$c
 3$c 3$cT &',0;?049=8<AE)-'+#&`lc49n%`l  #`l &*	`l
 "%T#Y"78`l  -`l !) 6`l  (5`l )1(>`l &`l $`l !`lF!, #"'++/4n nr4!.F $ $ & & ( ( # #   ]]_12 )-$& #"269=.2-/*-;?/0"MQ*.049=8<AE%*  9B"%)-7B7c49n%B7 SMB7 	B7
 }B7 U../B7 $E$5$56B7 **+B7 &c]B7 !B7 "%T#Y"78B7  (}B7 e_B7 E%//43H"HIJB7 %,,'B7   -!B7" !) 6#B7$  (5%B7& )1(>'B7( c])B7* +B7, '(Cd+T124DF\\]
-B72 -1I3B74  5B76 DI&7B7 3 B7r8   r   r=   )r:   )T)NNNN)?r~   typingr   r   r   r   r   numpyr,   r$   torch.nn.functionalr&   r'   rm   PILr   transformersr	   r
   r   r   	callbacksr   r   r   r   modelsr   r   pipelines.pipeline_utilsr   
schedulersr   utilsr   r   r   utils.torch_utilsr   r   r   pipeline_outputr   torch_xla.core.xla_modelcore	xla_modelr:  r9  
get_loggerrP  loggerrY  r7   rK   r[   ri   rw   rM   r   rB   r3   r   r   r|   r8   r6   <module>ro     s     8 8       B 0 H 9 9 O O - - 6 ))MM			H	%' T6!4JW&68 *.15%)$(8*!#8* U3,-.8* S	"	8*
 T%[!8*vB7!2 B7r8   