
from __future__ import annotations

import math
import operator
import warnings
from contextlib import contextmanager
from dataclasses import replace
from functools import partial, reduce
from typing import Literal, Optional

import packaging.version
import torch
import transformers
from torch import nn

from peft.import_utils import is_bnb_4bit_available, is_bnb_available
from peft.tuners.tuners_utils import BaseTuner, BaseTunerLayer, replicate_layers
from peft.utils import (
    TRANSFORMERS_MODELS_TO_LORA_TARGET_MODULES_MAPPING,
    AuxiliaryTrainingWrapper,
    ModulesToSaveWrapper,
    _freeze_adapter,
    _get_submodules,
    get_peft_model_state_dict,
    get_quantization_config,
)
from peft.utils.merge_utils import dare_linear, dare_ties, magnitude_prune, task_arithmetic, ties
from peft.utils.other import get_pattern_key

from .aqlm import dispatch_aqlm
from .awq import dispatch_awq
from .config import LoraConfig
from .eetq import dispatch_eetq
from .gptq import dispatch_gptq
from .hqq import dispatch_hqq
from .inc import dispatch_inc
from .layer import Conv2d, LoraLayer, ParamWrapper, dispatch_default
from .torchao import dispatch_torchao
from .tp_layer import dispatch_megatron


def _adapter_names_pre_forward_hook(target, args, kwargs, adapter_names):
    # pre-forward hook to inject the adapter_names argument when using mixed adapter batches inference
    kwargs["adapter_names"] = adapter_names
    return args, kwargs


def _alora_offsets_pre_forward_hook(target, args, kwargs, alora_offsets):
    # pre-forward hook to inject the alora_offsets argument needed for aLoRA inference
    kwargs["alora_offsets"] = alora_offsets
    return args, kwargs

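# A minimal sketch of how the two pre-forward hooks above are meant to be used (the adapter
# name is made up for illustration): registering with `with_kwargs=True` makes torch call the
# hook as hook(module, args, kwargs), and the returned (args, kwargs) pair replaces the
# originals, so an extra keyword argument reaches the layer's forward without changing its
# signature.
#
#     from functools import partial
#     import torch
#
#     linear = torch.nn.Linear(4, 4)
#     handle = linear.register_forward_pre_hook(
#         partial(_adapter_names_pre_forward_hook, adapter_names=["my_adapter"]),
#         with_kwargs=True,
#     )
#     # linear(x) would now receive adapter_names=["my_adapter"] in its kwargs; a plain
#     # nn.Linear does not accept that keyword, but PEFT's LoRA layers do.
#     handle.remove()
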
class LoraModel(BaseTuner):
    """
    Creates Low Rank Adapter (LoRA) model from a pretrained transformers model.

    The method is described in detail in https://huggingface.co/papers/2106.09685.

    Args:
        model ([`torch.nn.Module`]): The model to be adapted.
        config ([`LoraConfig`]): The configuration of the Lora model.
        adapter_name (`str`): The name of the adapter, defaults to `"default"`.
        low_cpu_mem_usage (`bool`, `optional`, defaults to `False`):
            Create empty adapter weights on meta device. Useful to speed up the loading process.

    Returns:
        `torch.nn.Module`: The Lora model.

    Example:

        ```py
        >>> from transformers import AutoModelForSeq2SeqLM
        >>> from peft import LoraModel, LoraConfig

        >>> config = LoraConfig(
        ...     task_type="SEQ_2_SEQ_LM",
        ...     r=8,
        ...     lora_alpha=32,
        ...     target_modules=["q", "v"],
        ...     lora_dropout=0.01,
        ... )

        >>> model = AutoModelForSeq2SeqLM.from_pretrained("t5-base")
        >>> lora_model = LoraModel(model, config, "default")
        ```

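        A sketch of loading with `low_cpu_mem_usage` (same model as above; this assumes the keyword is
        forwarded by `get_peft_model`, which recent PEFT versions do):

        ```py
        >>> from transformers import AutoModelForSeq2SeqLM
        >>> from peft import LoraConfig, get_peft_model

        >>> config = LoraConfig(task_type="SEQ_2_SEQ_LM", r=8, lora_alpha=32, target_modules=["q", "v"])
        >>> model = AutoModelForSeq2SeqLM.from_pretrained("t5-base")
        >>> # adapter weights are first created empty on the meta device and only materialized
        >>> # when real weights are loaded into them, which speeds up adapter creation
        >>> lora_model = get_peft_model(model, config, low_cpu_mem_usage=True)
        ```
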
        ```py
        >>> import torch
        >>> import transformers
        >>> from peft import LoraConfig, PeftModel, get_peft_model, prepare_model_for_kbit_training

        >>> rank = ...
        >>> target_modules = ["q_proj", "k_proj", "v_proj", "out_proj", "fc_in", "fc_out", "wte"]
        >>> config = LoraConfig(
        ...     r=4, lora_alpha=16, target_modules=target_modules, lora_dropout=0.1, bias="none", task_type="CAUSAL_LM"
        ... )
        >>> quantization_config = transformers.BitsAndBytesConfig(load_in_8bit=True)

        >>> tokenizer = transformers.AutoTokenizer.from_pretrained(
        ...     "kakaobrain/kogpt",
        ...     revision="KoGPT6B-ryan1.5b-float16",  # or float32 version: revision="KoGPT6B-ryan1.5b"
        ...     bos_token="[BOS]",
        ...     eos_token="[EOS]",
        ...     unk_token="[UNK]",
        ...     pad_token="[PAD]",
        ...     mask_token="[MASK]",
        ... )
        >>> model = transformers.GPTJForCausalLM.from_pretrained(
        ...     "kakaobrain/kogpt",
        ...     revision="KoGPT6B-ryan1.5b-float16",  # or float32 version: revision="KoGPT6B-ryan1.5b"
        ...     pad_token_id=tokenizer.eos_token_id,
        ...     use_cache=False,
        ...     device_map={"": rank},
        ...     torch_dtype=torch.float16,
        ...     quantization_config=quantization_config,
        ... )
        >>> model = prepare_model_for_kbit_training(model)
        >>> lora_model = get_peft_model(model, config)
        ```

    **Attributes**:
        - **model** ([`~transformers.PreTrainedModel`]) -- The model to be adapted.
        - **peft_config** ([`LoraConfig`]): The configuration of the Lora model.
lora_strprefixc                R    UR                   (       a  [        X!R                   5        gg)z
A private method to modify the model structure before adapter is applied.

Args:
    peft_config (`PeftConfig`):
        The prepared adapter config.
    model (`nn.Module`):
        The model that is going to be adapted.
N)layer_replicationr   )selfpeft_configmodels      r1   _prepare_modelLoraModel._prepare_model   s      ((U$A$AB )r3   N)parameter_namec               H  ^ Uc  [        S5      eUR                  (       aM  [        U4S jU R                  R	                  5        5       5      nU(       a  [        SUR                   S35      e[        UR                  R                  5       U5      n	[        UR                  R                  5       U5      n
UR                  R                  XR                  5      nUR                  R                  XR                  5      n0 SU_SU_SUR                  _SUR                  _S	UR                  _S
UR                  _SUR                   _SUR"                  S L_SUR$                  _SUR&                  _SUR(                  R*                  _SUR,                  _SUR.                  _S[1        U R2                  SS5      _S[1        U R2                  SS5      _SU_n [4        R6                  " S5      " U R2                  5      US'   / SQnU H#  n[;        U R2                  US9nUc  M  UX S3'   M%     SSKJn  [A        U[B        5      =(       a    TURD                  ;   n[A        U[F        5      (       aw  [A        UU5      (       df  U(       d_  URI                  TUUUR                  UR                  UR                  UR                   UR,                  UR.                  URJ                  S9
  g [A        U[B        5      (       a  XsRL                  :X  a  [        S 5      e[O        U R2                  S!5      (       a  U R2                  RP                  OS nU RR                  " UTU4S"U0UD6nTU RT                  ;  a  URW                  S5        U RY                  XTUU5        g ! [8         a     GNf = f)#NzCurrent Key shouldn't be `None`c              3  P   >#    U  H  u  pUT:w  d  M  UR                   v   M     g 7fN)target_parameters).0keyconfadapter_names      r1   	<genexpr>0LoraModel._create_and_replace.<locals>.<genexpr>   s)      28P93TW[gTg&&&8Ps   &&z-Adding a LoRA config with `target_parameters=z` but there are already other LoRA adapters on this model that use `target_parameters`. At the moment, only one LoRA adapter per model with `target_parameters` is allowed.r
lora_alphalora_dropoutfan_in_fan_outinit_lora_weights
use_rslorause_dora	use_alora
use_qaloraqalora_group_sizeephemeral_gpu_offload	lora_biasarrow_configloaded_in_8bitis_loaded_in_8bitFloaded_in_4bitis_loaded_in_4bitrD   z:hf_quantizer.quantization_config.get_apply_tensor_subclassget_apply_tensor_subclass)gptqaqlmawq)method_quantization_configr   )AdaLoraLayer)rP   rQ   rS   rT   rU   rZ   r[   inference_modezTrying to target the same nn.Parameter twice, this should not happen. Please open an issue on the PEFT repo: https://github.com/huggingface/peft/issueshf_device_map
device_map)-
ValueErrorrH   anyr@   itemsr   rank_patternkeysalpha_patterngetrO   rP   rQ   rR   rS   rT   rU   alora_invocation_tokensrW   rX   runtime_configrY   rZ   r[   getattrrA   operator
attrgetterAttributeErrorr   peft.tuners.adalorarf   
isinstancer'   lora_Ar&   update_layerrg   rD   hasattrrh   _create_new_moduleactive_adaptersrequires_grad__replace_module)r?   lora_configrL   r.   target_nameparentcurrent_keyrD   other_configs_use_target_paramsr_key	alpha_keyrO   alphar0   quant_methodsquant_methodquantization_configrf   wrap_target_paramri   
new_modules     `                  r1   _create_and_replaceLoraModel._create_and_replace   s    >??((.1 28<8H8H8N8N8P2 /+ / CKDaDaCb cV V    8 8 = = ?M#K$=$=$B$B$DkR	$$((>))--i9O9OP

%
 K44
 k88	

  !>!>
 +00
 ,,
 <<DH
 +00
  !>!>
 $[%?%?%U%U
 ..
 K44
 gdjj2EuM
 gdjj2EuM
  n!
(	2:2E2EL3jj3F./ 0)L"9$**\"Z".@S';<= * 	5 'v|<`,RXR_R_B_fi((FL1Q1QZk (55"-"?"?&11$--%//(55*99    &,//^G\G\5\ L  6=TZZ5Y5Y11_cJ00lFt_itmstJ4#7#77))%0  j&IM  		s   )N 
    def _replace_module(self, parent, child_name, new_module, child):
        setattr(parent, child_name, new_module)
        # It's not necessary to set requires_grad here, as that is handled by
        # _mark_only_adapters_as_trainable

        # child layer wraps the original module, unpack it
        if hasattr(child, "base_layer"):
            child = child.base_layer

        meta = torch.device("meta")
        # dispatch to correct device
        for name, module in new_module.named_modules():
            if (self.prefix in name) or ("ranknum" in name):
                if hasattr(child, "qweight"):
                    weight = child.qweight
                elif hasattr(child, "W_q"):
                    weight = child.W_q
                elif hasattr(child, "weight"):
                    weight = child.weight
                elif getattr(child, "in_proj_weight", None) is not None:  # MHA
                    weight = child.in_proj_weight
                else:
                    weight = next(child.parameters())
                if not any(p.device == meta for p in module.parameters()):
                    module.to(weight.device)

    @staticmethod
    def _create_new_module(lora_config, adapter_name, target, **kwargs):
        # Collect dispatcher functions to decide what backend to use for the replaced LoRA layer. The order matters,
        # because the first match is always used. Therefore, the default layers should be checked last.
        dispatchers = []

        if lora_config._custom_modules:
            # Experimental custom LoRA module support. Allows users to pass a custom mapping for unsupported layer
            # types by implementing their own LoRA layers.
            def dynamic_dispatch_func(target, adapter_name, lora_config, **kwargs):
                new_module = None

                if isinstance(target, BaseTunerLayer):
                    target_base_layer = target.get_base_layer()
                else:
                    target_base_layer = target

                for key, custom_cls in lora_config._custom_modules.items():
                    if isinstance(target_base_layer, key):
                        new_module = custom_cls(target, adapter_name, **kwargs)
                        break

                return new_module

            dispatchers.append(dynamic_dispatch_func)

        # avoid eager bnb import
        if is_bnb_available():
            from .bnb import dispatch_bnb_8bit

            dispatchers.append(dispatch_bnb_8bit)

        if is_bnb_4bit_available():
            from .bnb import dispatch_bnb_4bit

            dispatchers.append(dispatch_bnb_4bit)

        dispatchers.extend(
            [
                dispatch_eetq,
                dispatch_aqlm,
                dispatch_awq,
                dispatch_gptq,
                dispatch_hqq,
                dispatch_inc,
                dispatch_torchao,
                dispatch_megatron,
                dispatch_default,
            ]
        )

        new_module = None
        for dispatcher in dispatchers:
            new_module = dispatcher(target, adapter_name, lora_config=lora_config, **kwargs)
            if new_module is not None:  # first match wins
                break

        if new_module is None:
            # no module could be matched
            raise ValueError(
                f"Target module {target} is not supported. Currently, only the following modules are supported: "
                "`torch.nn.Linear`, `torch.nn.Embedding`, `torch.nn.Conv1d`, `torch.nn.Conv2d`, `torch.nn.Conv3d`, "
                "`transformers.pytorch_utils.Conv1D`, `torch.nn.MultiheadAttention.`."
            )

        return new_module

    @contextmanager
    def _enable_peft_forward_hooks(self, *args, **kwargs):
        # If adapter_names or alora_offsets are passed as arguments, inject them into the forward kwargs of the
        # relevant sub-modules for the duration of the context.
        adapter_names = kwargs.pop("adapter_names", None)
        alora_offsets = kwargs.pop("alora_offsets", None)
        if (adapter_names is None) and (alora_offsets is None):
            # nothing to do
            yield
            return

        hook_handles = []

        if alora_offsets is not None:
            transformers_lt_4_52 = packaging.version.parse(transformers.__version__) < packaging.version.parse(
                "4.52.1"
            )
            if transformers_lt_4_52:
                raise ValueError("Using aLoRA requires transformers >= 4.52.1.")

            from transformers.modeling_layers import GradientCheckpointingLayer

            for layer in self.modules():
                if isinstance(layer, GradientCheckpointingLayer) and layer.gradient_checkpointing:
                    # With gradient checkpointing, the forward is re-run during backward, so the hooks on the LoRA
                    # sub-layers must be (re-)registered on each forward and removed again after backward.

                    def forward_pre_hook(module, inputs, kwargs):
                        for submodule in module.modules():
                            if isinstance(submodule, LoraLayer):
                                handle = submodule.register_forward_pre_hook(
                                    partial(_alora_offsets_pre_forward_hook, alora_offsets=kwargs["alora_offsets"]),
                                    with_kwargs=True,
                                )
                                module._peft_gradient_checkpointing_forward_hooks.append(handle)

                    def backward_hook(module, grad_input, grad_output):
                        while module._peft_gradient_checkpointing_forward_hooks:
                            module._peft_gradient_checkpointing_forward_hooks.pop().remove()

                    if getattr(layer, "_peft_gradient_checkpointing_forward_hooks", []):
                        raise ValueError(
                            "Multiple invocations of PEFT forward hooks before .backward() with enabled gradient "
                            "checkpointing. Disable gradient checkpointing or only call forward once per backward."
                        )
                    layer._peft_gradient_checkpointing_forward_hooks = []
                    handle = layer.register_forward_pre_hook(
                        partial(_alora_offsets_pre_forward_hook, alora_offsets=alora_offsets), with_kwargs=True
                    )
                    hook_handles.append(handle)
                    handle = layer.register_forward_pre_hook(forward_pre_hook, with_kwargs=True)
                    hook_handles.append(handle)
                    handle = layer.register_full_backward_hook(backward_hook)
                    hook_handles.append(handle)
                elif isinstance(layer, LoraLayer):
                    pre_forward = partial(_alora_offsets_pre_forward_hook, alora_offsets=alora_offsets)
                    handle = layer.register_forward_pre_hook(pre_forward, with_kwargs=True)
                    hook_handles.append(handle)

        num_beams = kwargs.get("num_beams", None)
        uses_beam_search = isinstance(num_beams, int) and num_beams > 1
        if uses_beam_search and (alora_offsets is not None):
            raise ValueError("Beam search not yet supported for aLoRA.")

        if adapter_names is not None:
            if self.training:
                raise ValueError("Cannot pass `adapter_names` when the model is in training mode.")

            # Check that users only passed actually existing adapters.
            # Note: We cannot do this on the layer level, as each individual layer may not have each adapter. Still,
            # we want to check that there is at least one layer with the given name, or else something like typos can
            # easily slip.
            expected_adapters = set()
            for layer in self.modules():
                if isinstance(layer, LoraLayer):
                    expected_adapters |= layer.lora_A.keys()
                    expected_adapters |= layer.lora_embedding_A.keys()
            unique_adapters = {name for name in adapter_names if name != "__base__"}
            unexpected_adapters = unique_adapters - expected_adapters
            if unexpected_adapters:
                raise ValueError(
                    f"Trying to infer with non-existing adapter(s): {', '.join(sorted(unexpected_adapters))}"
                )

            original_adapter_names = adapter_names[:]
            if uses_beam_search:
                if not isinstance(adapter_names, (list, tuple)):
                    raise TypeError(f"Got adapter names of type {type(adapter_names)}, expected a list of str.")
                # When beam search is used, the inputs are repeated num_beams times, so each adapter name must also
                # be repeated num_beams times and the nested list flattened.
                adapter_names = sum(([n] * kwargs["num_beams"] for n in adapter_names), [])

            for module in self.modules():
                if isinstance(module, LoraLayer) or isinstance(module, AuxiliaryTrainingWrapper):
                    pre_forward = partial(_adapter_names_pre_forward_hook, adapter_names=adapter_names)
                    handle = module.register_forward_pre_hook(pre_forward, with_kwargs=True)
                    hook_handles.append(handle)

            if uses_beam_search and hasattr(self.model, "get_encoder"):
                # For encoder-decoder models, the encoder does not see the beam-expanded inputs, so it must use the
                # original (non-repeated) adapter names; registering another hook on the encoder's sub-modules
                # overrides the adapter names set by the hook above.
                for module in self.model.get_encoder().modules():
                    if isinstance(module, LoraLayer) or isinstance(module, AuxiliaryTrainingWrapper):
                        pre_forward = partial(_adapter_names_pre_forward_hook, adapter_names=original_adapter_names)
                        handle = module.register_forward_pre_hook(pre_forward, with_kwargs=True)
                        hook_handles.append(handle)

        yield

        for handle in hook_handles:
            handle.remove()

    def _check_merge_allowed(self):
        """Verify that the configuration supports merging.

        Currently gptq quantization and replicated layers do not support merging.
        """
        super()._check_merge_allowed()
        if getattr(self.model, "quantization_method", None) == "gptq":
            raise ValueError("Cannot merge LORA layers when the model is gptq quantized")
        if self.peft_config.get("layer_replication"):
            raise ValueError("Cannot merge LORA layers when base model layers are replicated")

    def _prepare_adapter_config(self, peft_config, model_config):
        if peft_config.target_modules is None:
            if model_config["model_type"] in self.target_module_mapping:
                peft_config.target_modules = set(self.target_module_mapping[model_config["model_type"]])
            elif not peft_config.target_parameters:
                raise ValueError("Please specify `target_modules` or `target_parameters` in `peft_config`")
        return peft_config

    def _check_add_weighted_adapter(
        self, adapters: list[str], combination_type: str, svd_rank: int | None
    ) -> tuple[str, int, str]:
        """
        Helper function to check if the arguments to add_weighted_adapter are valid and compatible with the underlying
        model.
        """
        for adapter in adapters:
            if adapter not in list(self.peft_config.keys()):
                raise ValueError(f"Adapter {adapter} does not exist")

        for adapter in adapters:
            if self.peft_config[adapter].target_parameters:
                raise ValueError(
                    f"add_weighted_adapter does not support targeting nn.Parameter (problematic adapter '{adapter}')"
                )

        # If more than one of the adapters targets the same module with modules_to_save, raise an error, as these
        # modules cannot be merged. First, find the ModulesToSaveWrapper instances in the model, then check if they
        # have modules for the adapters to be merged.
        modules_to_save_wrappers = [module for module in self.modules() if isinstance(module, ModulesToSaveWrapper)]
        problematic_wrappers = [
            wrapper
            for wrapper in modules_to_save_wrappers
            if sum(adapter in wrapper.modules_to_save for adapter in adapters) > 1
        ]
        if problematic_wrappers:
            raise ValueError(
                "Cannot add weighted adapters if they target the same module with modules_to_save, but found "
                f"{len(problematic_wrappers)} such instance(s)."
            )

        # If there is only one adapter, linear merging is sufficient.
        combination_type = "linear" if len(adapters) == 1 else combination_type

        adapters_ranks = [
            config.r if not config.rank_pattern else max(config.r, *config.rank_pattern.values())
            for config in (self.peft_config[adapter] for adapter in adapters)
        ]
        if combination_type in ("linear", "ties", "dare_ties", "dare_linear", "magnitude_prune"):
            # all adapters ranks should be same, new rank is just this value
            if len(set(adapters_ranks)) != 1:
                raise ValueError(
                    "All adapters must have the same r value when using combination_type linear, ties, dare_ties or "
                    "dare_linear."
                )
            new_rank = adapters_ranks[0]
        elif combination_type == "cat":
            # adapters ranks may be different, new rank is sum of all ranks
            # be careful, because output adapter rank may be really big if mixing a lot of adapters
            new_rank = sum(adapters_ranks)
        elif combination_type.endswith("svd"):
            # new rank is the max of all ranks of the adapters if not provided
            new_rank = svd_rank or max(adapters_ranks)
        else:
            raise ValueError(f"Invalid combination_type: {combination_type}")

        target_module_types = [type(self.peft_config[adapter].target_modules) for adapter in adapters]
        if not target_module_types:
            raise ValueError(f"Found no adapter matching the names in {adapters}")
        if len(set(target_module_types)) > 1:
            raise ValueError(
                "all adapter configs should follow the same target modules type. "
                "Combining adapters with `target_modules` type being a mix of list/set and string is not supported."
            )

        if target_module_types[0] is str:
            new_target_modules = "|".join(f"({self.peft_config[adapter].target_modules})" for adapter in adapters)
        elif target_module_types[0] is set:
            new_target_modules = reduce(
                operator.or_, (self.peft_config[adapter].target_modules for adapter in adapters)
            )
        else:
            raise TypeError(f"Invalid type {target_module_types[0]} found in target_modules")

        return combination_type, new_rank, new_target_modules

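    # How the combination types resolve the new adapter's rank in the check above (the ranks
    # are made up for illustration): the "linear" family requires equal ranks, "cat" sums the
    # ranks, and the "*svd" types take `svd_rank` or the maximum of the merged ranks.
    #
    #     adapters_ranks = [8, 16]
    #     # "cat"                   -> new_rank = 8 + 16 = 24
    #     # "svd" family            -> new_rank = svd_rank or max(8, 16) = 16
    #     # "linear", "ties", etc.  -> ValueError, because 8 != 16
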
    def add_weighted_adapter(
        self,
        adapters: list[str],
        weights: list[float],
        adapter_name: str,
        combination_type: str = "svd",
        svd_rank: int | None = None,
        svd_clamp: int | None = None,
        svd_full_matrices: bool = True,
        svd_driver: str | None = None,
        density: float | None = None,
        majority_sign_method: Literal["total", "frequency"] = "total",
    ) -> None:
        """
        This method adds a new adapter by merging the given adapters with the given weights.

        When using the `cat` combination_type you should be aware that rank of the resulting adapter will be equal to
        the sum of all adapters ranks. So it's possible that the mixed adapter may become too big and result in OOM
        errors.

        Args:
            adapters (`list`):
                List of adapter names to be merged.
            weights (`list`):
                List of weights for each adapter. Weights can be positive or negative, allowing for both addition and
                subtraction of adapter effects.
            adapter_name (`str`):
                Name of the new adapter.
            combination_type (`str`):
                The merging type can be one of [`svd`, `linear`, `cat`, `ties`, `ties_svd`, `dare_ties`, `dare_linear`,
                `dare_ties_svd`, `dare_linear_svd`, `magnitude_prune`, `magnitude_prune_svd`]. When using the `cat`
                combination_type, the rank of the resulting adapter is equal to the sum of all adapters ranks (the
                mixed adapter may be too big and result in OOM errors).
            svd_rank (`int`, *optional*):
                Rank of output adapter for svd. If None provided, will use max rank of merging adapters.
            svd_clamp (`float`, *optional*):
                A quantile threshold for clamping SVD decomposition output. If None is provided, do not perform
                clamping. Defaults to None.
            svd_full_matrices (`bool`, *optional*):
                Controls whether to compute the full or reduced SVD, and consequently, the shape of the returned
                tensors U and Vh. Defaults to True.
            svd_driver (`str`, *optional*):
                Name of the cuSOLVER method to be used. This keyword argument only works when merging on CUDA. Can be
                one of [None, `gesvd`, `gesvdj`, `gesvda`]. For more info please refer to `torch.linalg.svd`
                documentation. Defaults to None.
            density (`float`, *optional*):
                Value between 0 and 1. 0 means all values are pruned and 1 means no values are pruned. Should be used
                with [`ties`, `ties_svd`, `dare_ties`, `dare_linear`, `dare_ties_svd`, `dare_linear_svd`,
                `magnitude_prune`, `magnitude_prune_svd`]
            majority_sign_method (`str`):
                The method, should be one of ["total", "frequency"], to use to get the magnitude of the sign values.
                Should be used with [`ties`, `ties_svd`, `dare_ties`, `dare_ties_svd`]
        """
        if adapter_name in list(self.peft_config.keys()):
            return

        combination_type, new_rank, new_target_modules = self._check_add_weighted_adapter(
            adapters=adapters,
            combination_type=combination_type,
            svd_rank=svd_rank,
        )

        self.peft_config[adapter_name] = replace(
            self.peft_config[adapters[0]],
            r=new_rank,
            lora_alpha=new_rank,
            target_modules=new_target_modules,
            rank_pattern={},
            alpha_pattern={},
        )
        self.inject_adapter(self.model, adapter_name)

        # Do we really need that?
        _freeze_adapter(self.model, adapter_name)

        key_list = [key for key, _ in self.model.named_modules() if self.prefix not in key]
        for key in key_list:
            _, target, _ = _get_submodules(self.model, key)
            if isinstance(target, LoraLayer):
                if adapter_name in target.lora_A:
                    target_lora_A = target.lora_A[adapter_name].weight
                    target_lora_B = target.lora_B[adapter_name].weight
                elif adapter_name in target.lora_embedding_A:
                    target_lora_A = target.lora_embedding_A[adapter_name]
                    target_lora_B = target.lora_embedding_B[adapter_name]
                else:
                    continue

                target_lora_A.data = target_lora_A.data * 0.0
                target_lora_B.data = target_lora_B.data * 0.0
                if combination_type == "cat":
                    loras_A, loras_B = [], []
                    for adapter, weight in zip(adapters, weights):
                        if adapter in target.lora_A:
                            current_adapter_lora_A = target.lora_A[adapter].weight
                            current_adapter_lora_B = target.lora_B[adapter].weight
                        elif adapter in target.lora_embedding_A:
                            current_adapter_lora_A = target.lora_embedding_A[adapter]
                            current_adapter_lora_B = target.lora_embedding_B[adapter]
                        else:
                            continue
                        loras_A.append(current_adapter_lora_A.data * weight * target.scaling[adapter])
                        loras_B.append(current_adapter_lora_B.data)

                    if len(loras_A) == 0:
                        raise ValueError("No matching LoRAs found. Please raise an issue on GitHub.")
                    loras_A = torch.cat(loras_A, dim=0)
                    loras_B = torch.cat(loras_B, dim=1)
                    target_lora_A.data[: loras_A.shape[0], :] = loras_A
                    target_lora_B.data[:, : loras_B.shape[1]] = loras_B
                elif combination_type in [
                    "svd",
                    "ties_svd",
                    "dare_linear_svd",
                    "dare_ties_svd",
                    "magnitude_prune_svd",
                ]:
                    target_lora_A.data, target_lora_B.data = self._svd_generalized_task_arithmetic_weighted_adapter(
                        combination_type,
                        adapters,
                        weights,
                        new_rank,
                        target,
                        target_lora_A,
                        target_lora_B,
                        density,
                        majority_sign_method,
                        svd_clamp,
                        full_matrices=svd_full_matrices,
                        driver=svd_driver,
                    )
                elif combination_type in ["linear", "ties", "dare_linear", "dare_ties", "magnitude_prune"]:
                    target_lora_A.data, target_lora_B.data = self._generalized_task_arithmetic_weighted_adapter(
                        combination_type, adapters, weights, target, density, majority_sign_method
                    )

    def _svd_generalized_task_arithmetic_weighted_adapter(
        self,
        combination_type,
        adapters,
        weights,
        new_rank,
        target,
        target_lora_A,
        target_lora_B,
        density,
        majority_sign_method,
        clamp=None,
        full_matrices=True,
        driver=None,
    ):
        valid_adapters = []
        valid_weights = []
        is_embedding = any(adapter in target.lora_embedding_A for adapter in adapters)
        for adapter, weight in zip(adapters, weights):
            if adapter in target.lora_A or adapter in target.lora_embedding_A:
                valid_adapters.append(adapter)
                valid_weights.append(weight * target.scaling[adapter])

        # if no valid adapter, nothing to do
        if len(valid_adapters) == 0:
            raise ValueError("No matching LoRAs found. Please raise an issue on Github.")
        delta_weight = [target.get_delta_weight(adapter) for adapter in valid_adapters]
        valid_weights = torch.tensor(valid_weights).to(delta_weight[0].device)
        if combination_type == "svd":
            delta_weight = task_arithmetic(delta_weight, valid_weights)
        elif combination_type == "ties_svd":
            delta_weight = ties(delta_weight, valid_weights, density, majority_sign_method)
        elif combination_type == "dare_linear_svd":
            delta_weight = dare_linear(delta_weight, valid_weights, density)
        elif combination_type == "dare_ties_svd":
            delta_weight = dare_ties(delta_weight, valid_weights, density, majority_sign_method)
        elif combination_type == "magnitude_prune_svd":
            delta_weight = magnitude_prune(delta_weight, valid_weights, density)
        else:
            raise ValueError(f"Invalid value passed to combination type: {combination_type}")

        conv2d = isinstance(target, Conv2d)
        if conv2d:
            conv2d_1x1 = target.weight.size()[2:4] == (1, 1)
            if not conv2d_1x1:
                delta_weight = delta_weight.flatten(start_dim=1)
            else:
                delta_weight = delta_weight.squeeze()
        if (hasattr(target, "fan_in_fan_out") and target.fan_in_fan_out) or is_embedding:
            delta_weight = delta_weight.T

        # based on https://github.com/kohya-ss/sd-scripts/blob/main/networks/extract_lora_from_models.py
        U, S, Vh = torch.linalg.svd(delta_weight, full_matrices=full_matrices, driver=driver)
        U = U[:, :new_rank]
        S = S[:new_rank]
        U = U @ torch.diag(S)
        Vh = Vh[:new_rank, :]
        if clamp is not None:
            dist = torch.cat([U.flatten(), Vh.flatten()])
            hi_val = torch.quantile(dist, clamp)
            low_val = -hi_val
            U = U.clamp(low_val, hi_val)
            Vh = Vh.clamp(low_val, hi_val)
        if conv2d:
            U = U.reshape(target_lora_B.data.shape)
            Vh = Vh.reshape(target_lora_A.data.shape)
        return Vh, U

    def _generalized_task_arithmetic_weighted_adapter(
        self,
        combination_type,
        adapters,
        weights,
        target,
        density,
        majority_sign_method,
    ):
        # account weights for LoRA A and B layers.
        valid_weights = []
        lora_A_deltas = []
        lora_B_deltas = []
        for adapter, weight in zip(adapters, weights):
            if adapter in target.lora_A:
                current_adapter_lora_A = target.lora_A[adapter].weight
                current_adapter_lora_B = target.lora_B[adapter].weight
            elif adapter in target.lora_embedding_A:
                current_adapter_lora_A = target.lora_embedding_A[adapter]
                current_adapter_lora_B = target.lora_embedding_B[adapter]
            else:
                continue
            weight_with_scaling = weight * target.scaling[adapter]
            sign = 1 if weight_with_scaling >= 0 else -1
            valid_weights.append(sign * math.sqrt(abs(weight_with_scaling)))
            lora_A_deltas.append(current_adapter_lora_A.data)
            lora_B_deltas.append(current_adapter_lora_B.data)
        valid_weights = torch.tensor(valid_weights).to(lora_A_deltas[0].device)
        lora_deltas = [lora_A_deltas, lora_B_deltas]
        dtype = lora_A_deltas[0].dtype
        for i, task_tensors in enumerate(lora_deltas):
            if combination_type == "linear":
                lora_deltas[i] = task_arithmetic(task_tensors, valid_weights)
            elif combination_type == "ties":
                lora_deltas[i] = ties(task_tensors, valid_weights, density, majority_sign_method)
            elif combination_type == "dare_linear":
                lora_deltas[i] = dare_linear(task_tensors, valid_weights, density)
            elif combination_type == "dare_ties":
                lora_deltas[i] = dare_ties(task_tensors, valid_weights, density, majority_sign_method)
            elif combination_type == "magnitude_prune":
                lora_deltas[i] = magnitude_prune(task_tensors, valid_weights, density)
            else:
                raise ValueError("Invalid combination type")
        lora_deltas = [delta.to(dtype) for delta in lora_deltas]
        return lora_deltas

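    # A sketch of how `add_weighted_adapter` and the two helpers above fit together (the
    # adapter names and weights are made up for illustration):
    #
    #     model.add_weighted_adapter(
    #         adapters=["adapter_a", "adapter_b"],
    #         weights=[0.7, 0.3],
    #         adapter_name="merged",
    #         combination_type="ties",
    #         density=0.2,
    #     )
    #     model.set_adapter("merged")
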
    def subtract_mutated_init(self, output_state_dict: dict[str, torch.Tensor], adapter_name: str, kwargs=None):
        """
        This function can calculate the updates of the PiSSA/CorDA/OLoRA by comparing the parameters of the
        PiSSA/CorDA/OLoRA adapter in `output_state_dict` with the initial values of PiSSA/CorDA/OLoRA in
        `adapter_name`, thus converting PiSSA/CorDA/OLoRA to LoRA.
        """
        for name, param in self.model.named_parameters():
            if (
                param.data.dtype != torch.float32
                and param.data.dtype != torch.float16
                and param.data.dtype != torch.bfloat16
            ) and adapter_name.startswith("pissa"):
                warnings.warn(
                    r"Note that Quant(W_res) + AB != Quant(W) + \Delta(AB); "
                    "the converted LoRA, when combined with W or Quant(W), may introduce a certain gap in the "
                    "fine-tuned model. Therefore, we recommend directly using the Quant(W_res) in conjunction with "
                    "the PiSSA adapter. "
                )
        mutated_init_state_dict = get_peft_model_state_dict(
            self,
            state_dict=kwargs.get("state_dict", None),
            adapter_name=adapter_name,
        )
        tensors_lora = {}
        for name in output_state_dict.keys():
            ## W = W^{res} + A_0 \times B_0,
            ## W + \Delta W = W^{res} + A \times B,
            ## \Delta W = A \times B - A_0 \times B_0 = [A | A_0] \times [B | -B_0]^T = A' \times B'.
            if "lora_A" in name:
                tensors_lora[name] = torch.cat(
                    [output_state_dict[name], mutated_init_state_dict[".".join(name.split(".")[1:])]], dim=0
                )
            elif "lora_B" in name:
                tensors_lora[name] = torch.cat(
                    [output_state_dict[name], -mutated_init_state_dict[".".join(name.split(".")[1:])]], dim=1
                )

        return tensors_lora

    def _add_modules_to_tie(self, peft_config, tied_weight_keys):
        # Mark tied weight keys that are not already covered by `modules_to_save`, so that they can be tied again
        # after the adapter has been applied.
        modules_to_save = set(getattr(peft_config, "modules_to_save", []) or [])
        missing_keys = set(tied_weight_keys) - modules_to_save
        peft_config.modules_to_tie = missing_keys
