
    oin                   d   S SK Jr  S SKrS SKrS SKrS SKrS SKrS SKrS SKJ	r	J
r
  S SKJr  S SKJrJr  S SKJrJrJrJr  S SKrS SKJr  S SKJrJr  S S	KJr  S S
KJr  S SKJr  S SKJ r   S SK!J"r"  S SK#J$r$  S SK%J&r&  S SK'J(r(J)r)J*r*J+r+J,r,  S SK-J.r.  S SK/J0r0J1r1J2r2J3r3J4r4  S SK5J6r6J7r7  S SK8J9r9  SSK:J;r;  SSK<J=r=  SSK>J?r?  \R                  " \R                  5      \R                  " S5      :  rB\B=(       a    \R                  R                  5       rE\S 5       rFS1S jrGS2S jrH " S S\R                  \	5      rJ " S  S!\	5      rK      S3S" jrL " S# S$5      rMS4S% jrNS5S6S& jjrOS7S' jrPS8S9S( jjrQS:S;S* jjrRS<S+ jrSS)\K4       S=S, jjrTS>S- jrU\K4         S?S. jjrVS@SAS/ jjrWS@SBS0 jjrXg)C    )annotationsN)ABCabstractmethod)Sequence)contextmanagernullcontext)AnyOptionalUnionoverload)AlignDevicesHook)named_module_tensorsoffload_state_dict)version)nn)tqdm)PreTrainedModel)Conv1D)PEFT_TYPE_TO_PREFIX_MAPPING)INCLUDE_LINEAR_LAYERS_SHORTHAND)DUMMY_MODEL_CONFIGDUMMY_TARGET_MODULESEMBEDDING_LAYER_NAMES#MIN_TARGET_MODULES_FOR_OPTIMIZATIONSEQ_CLS_HEAD_NAMES)init_empty_weights)AuxiliaryTrainingWrapper%_get_module_names_tied_with_embedding_set_adaptermatch_target_against_key set_additional_trainable_modules)PeftTypeTaskType)PeftWarning   )
PeftConfig)_get_submodules   )
BufferDictz2.5.0c              #    #    / nU R                  5        H  u  p#US;   a  M  [        US5      (       d  M   [        UR                  [        5      (       d  MA  UR                  R
                  (       d  M^  UR                  R                  U5        UR                  U5        M     Sn[        U S5      (       Ga  [        U R                  S5      (       Ga  [        U R                  R                  [        5      (       Ga  U R                  R                  R
                  (       Ga  [        R                  " S5      U R                  R                  R                  R                  5       ;   Ga+  [        U R                  R                  R                  S5      (       a  U R                  R                  R                  R                  R                  n[!        [#        U R                  R                  R                  R                  5      R%                  5       5      S   nXV   S   n/ n[&        R(                  R+                  U5       H-  n	S	U	;   a  UR                  U	5          OUR                  U	5        M/     [&        R(                  R,                  " U6 n
U
S
-   nU R                  R                  R                  U R                  5        SnSv   U H3  nUR                  R/                  U[        R0                  " / 5      5        M5     U(       Ga6  [3        U R                  5       VVs0 s H  u  p,X,R5                  S5      _M     snnU R                  R                  l        [        R                  " S5      U R                  R                  R                  R                  5       ;   aY  [        U R                  R                  R                  S5      (       a*  [7        WU R                  R                  R                  5        U R                  R                  R/                  U R                  [        R0                  " / 5      5        ggs  snnf 7f)a  
A utility for modifying a module containing one or more tuners and a base layer, any of which are offloaded to the
CPU or disk. Moves a module's sub-modules to the execution device before some action is performed, after that the
base layer state dictionary is re-assigned (if that layer was offloaded to the disk) and finally the parameters are
offloaded.

If the module has no offloaded sub-modules, this function does nothing.

Args:
    layer ('torch.nn.Module'):
        layer with tuners to be merged
) 
base_layer_hf_hookFr,   metadatasetr   safetensors_filez--z-mergedTNcpu)named_moduleshasattr
isinstancer-   r   offloadpre_forwardappendr,   torchdeviceoriginal_devicesvaluesweights_mapr/   indexlistdictkeysospathsplitjoinpost_forwardtensorr   tor   )layeroffloaded_modulesnamemodulebase_layer_offloadr=   module_name	file_namebase_name_arri	base_namesafetensors_filenameparams                R/home/james-whalen/.local/lib/python3.13/site-packages/peft/tuners/tuners_utils.pyonload_layerrU   A   s7     ++-%%6:&&:fooGW+X+X]c]l]l]t]t]tOO''/$$V, . ul##  *--u''002BCC%%--- <<5#3#3#<#<#M#M#T#T#VV[b%%119\
 \
 $$--99AAGGEtE$4$4$=$=$I$I$Q$QRWWYZ[\]K*+=>IMWW]]9-19!((+$$Q'	 .
 m4I#,y#8 !!--e.>.>?!	#$$VU\\"-=> $  6J%JZJZ5[1
5[kdD((5/!5[1
!!- <<5#3#3#<#<#M#M#T#T#VV[b%%119\
 \
 3U5E5E5N5N5Z5Z[!!..u/?/?bAQR 1
s$   /QQQ3J0Q#Q DQc                   1 SknSS1n1 SknU R                   U;   aj  [        US5      (       aX  [        UR                  SS5      U;   a<  X$;   a6  [	        SU R                    S	U S
UR                  R
                   SU S3	5      egggg)X
Prevent applying LoRA to incompatible modules in specific architectures (e.g., Mamba).
>   LORAXLORAADALORARANDLORAout_projconv1d>   mambamamba2	falcon_h1falcon_mambaconfig
model_typeNz[PEFT:z
] Module 'z7' is incompatible with Mamba-based models (model_type='z'). Incompatible modules: zG. Please remove it from `target_modules` to avoid compatibility issues.)	peft_typer3   getattrrb   
ValueErrorrc   )peft_configmodeltarget_namelora_like_typesincompatible_modulesmamba_model_typess         rT    _check_lora_target_modules_mambarm      s    
 ?O&1H 	0E8$$ELL,59JJ.../z+ G  % 7 788RSgRh iXX  / K % 	1    c                   [        U [        R                  5      (       a  [        (       af  [        U R                  [
        R                  R                  R                  5      (       a)  U R                  R                  5       R                  u  pX!4$ U R                  U R                  p X!4$ [        U [        R                  5      (       a  U R                  U R                  pX!4$ [        U [        R                   5      (       a  U R                  U R                  pX!4$ [        U [        R"                  5      (       a  U R                  U R                  pX!4$ [        U [        R$                  5      (       a  U R&                  U R(                  pX!4$ [        U [*        5      (       aL  [-        U R                  S5      (       a  U R                  R.                  OU R                  R                  u  p!X!4$ [        U [        R0                  5      (       a9  U R2                  (       d  [5        S5      eU R6                  SU R6                  -  pX!4$ [-        U S5      (       a+  [-        U S5      (       a  U R8                  U R:                  pX!4$ [-        U S5      (       a+  [-        U S5      (       a  U R<                  U R>                  pX!4$ [-        U S5      (       a4  U R@                  RB                  S	:X  a  U R                  U R                  pX!4$ [-        U S
5      (       a4  U R@                  RB                  S:X  a  U R                  U R                  pX!4$ U R@                  RB                  S:X  a  U R                  U R                  pX!4$ [-        U S5      (       a4  U R@                  RB                  S:X  a  U R                  U R                  pX!4$ U R@                  RB                  S:X  a  U R                  U R                  pX!4$ [-        U S5      (       a)  [-        U S5      (       a  U R                  U R                  pOSu  p![D        RF                  " S[I        U 5       S3[J        5        X!4$ )am  
Get the in_features and out_features of the layer.

Returns in_features and out_features as a tuple. If they cannot be determined, return a tuple of None and None.
This function covers a broad range of layers, some of which the caller might not support. Therefore, just because
this function returns a valid result does not imply that the layer type is supported.
ds_shapezPOnly same dim for query/key/value is supported as of now for MultiheadAttention.   
infeaturesoutfeatures
input_sizeoutput_size	codebooksQuantizedLinearw_bitWQLinear_GEMM
EetqLinearW_q	HQQLinearPatchedLinearin_featuresout_features)NNzUnsupported layer type 'z(' encountered, proceed at your own risk.)&r4   r   Linear_torch_supports_distributedweightr8   distributedrF   DTensorto_localshaper~   r   Conv1din_channelsout_channelsConv2dConv3d	Embeddingnum_embeddingsembedding_dimr   r3   rp   MultiheadAttention_qkv_same_embed_dimrf   	embed_dimrr   rs   rt   ru   	__class____name__warningswarntypeUserWarning)rK   r   r~   s      rT   _get_in_out_featuresr      s    &"))$$&&:fmmUEVEVE]E]EeEe+f+f(.(>(>(@(F(F%L^ $$[ )/(:(:F<O<OZ $$Y 
FBII	&	&$*$6$68K8K\V $$U 
FBII	&	&$*$6$68K8K\R $$Q 
FBII	&	&$*$6$68K8K\N $$M 
FBLL	)	)$*$9$96;O;O\J $$I 
FF	#	#&-fmmZ&H&HFMM""fmmNaNa 	"F $$A 
FB11	2	2))opp$*$4$4a&:J:J6J\: $$9 
	&	&76=+I+I$*$5$5v7I7I\4 $$3 
	&	&76=+I+I$*$5$5v7I7I\. $$- 
	%	%&*:*:*C*CGX*X$*$6$68K8K\( $$' 
	!	!f&6&6&?&??&R$*$6$68K8K\" $$! 
			"	"l	2$*$6$68K8K\ $$ 
		F$4$4$=$=$L$*$6$68K8K\ $$ 
			"	"o	5$*$6$68K8K\ $$ 6=))gfn.M.M(.(:(:F<O<O(2%K0f>fgitu$$rn   c                    ^  \ rS rSr% SrS\S'   S\S'   S\S'     S-         S.U 4S	 jjjr\S/S
 j5       rS0S jr	S1S jr
S2S jrS3S jr\S4S j5       r\ S5               S6S jj5       rS7S jrS8S9S jjrS:S jrS:S jrS;S jrS8S<S jjrS=S jrS8S>S jjrS r    S?         S@S jjr SA       SBS jjrSCS jrSDS jr  S:S jr   SE           SFS jjr          SGS  jr S:S! jr!SHSIS" jjr"S# r#SJS$ jr$SKSJS% jjr%\SLS& j5       r&SMS' jr'S/S( jr(S) r)SNS* jr*SOU 4S+ jjr+S,r,U =r-$ )P	BaseTuner   a  
A base tuner model that provides the common methods and attributes for all tuners that are injectable into a
torch.nn.Module

For adding a new Tuner class, one needs to overwrite the following methods:

- **_prepare_adapter_config**:
    A private method to eventually prepare the adapter config, for example in case the field `target_modules` is
    missing.
- **_create_and_replace**:
    A private method to create and replace the target module with the adapter module.
- **_check_target_module_exists**:
    A private helper method to check if the passed module's key name matches any of the target modules in the
    adapter_config.

The easiest is to check what is done in the `peft.tuners.lora.LoraModel` class.

Attributes:
    model (`torch.nn.Module`):
        The model to which the adapter tuner layers will be attached.
    forward (`Callable`):
        The forward method of the model.
    peft_config (`Union[`PeftConfig`, dict[str, PeftConfig]]`):
        The adapter configuration object, it should be a dictionary of `str` to `PeftConfig` objects. One can also
        pass a PeftConfig object and a new adapter will be created with the default name `adapter` or create a new
        dictionary with a key `adapter_name` and a value of that peft config.
    config (`dict[str, Any]`):
        The model configuration object, it should be a dictionary of `str` to `Any` objects.
    targeted_module_names (`list[str]`):
        The list of module names that were actually adapted. Can be useful to inspect if you want to quickly
        double-check that the `config.target_modules` were specified correctly.
    targeted_parameter_names (`list[str]`):
        The list of parameter names that were actually adapted. Can be useful to inspect if you want to quickly
        double-check that the `config.target_parameters` were specified correctly.
    prefix (`str`)
        The PEFT-method specific unique prefix. E.g. `"lora_"` for LoRA.
strprefixtype[BaseTunerLayer]tuner_layer_clszdict[str, list[str]]target_module_mappingc                v  > [         TU ]  5         Xl        / U l        / U l        [        U S5      (       d   [        U[        5      (       a  X20OUU l        OU[        R                  " S5        [        U[        5      (       a  X R                  U'   OU R                  R                  U5        X0l        U R                  U R                  U R                  U   U5        U[        R                  :w  d  X#   [        R                  :w  a  U R!                  U R                  X4US9  U R                  U R                  l        g )Nrg   zAlready found a `peft_config` attribute in the model. This will lead to having multiple adapters in the model. Make sure to know what you are doing!)low_cpu_mem_usage
state_dict)super__init__rh   targeted_module_namestargeted_parameter_namesr3   r4   r&   rg   r   r   updateactive_adapter_pre_injection_hookr"   rY   inject_adapter)selfrh   rg   adapter_namer   r   r   s         rT   r   BaseTuner.__init__
  s    	
02"35% t]++>HV`>a>a:grDMMG +z221<  .   ''4/;  T-=-=l-K\Z(..(K,E,W

Ljtu "&!1!1

rn   c                r    [        U R                  [        5      (       a  U R                  /$ U R                  $ Nr4   r   r   r   s    rT   active_adaptersBaseTuner.active_adapters/  0    d))3//''(("""rn   c                :    U R                   R                  " U0 UD6$ r   )rh   forward)r   argskwargss      rT   r   BaseTuner.forward6  s    zz!!42622rn   c                    g)aH  
A hook to be called before the adapter is injected into the model. This method can be overridden by child
classes to perform any pre-injection operations.

Args:
    model (`nn.Module`):
        The model to be adapted.
    config (`PeftConfig`):
        The adapter config.
    adapter_name (`str`):
        The adapter name.
N )r   rh   rb   r   s       rT   r   BaseTuner._pre_injection_hook9  s     	rn   c                    UR                   c<  U R                  R                  US   5      nUc  [        S5      e[	        U5      Ul         U$ )ar  
A private method to prepare the adapter config.

For transformers based models, if `peft_config.target_modules` is None, for some model architectures, we can
automatically infer the target modules from the `TRANSFORMERS_MODELS_TO_XXX_TARGET_MODULES_MAPPING`.

Args:
    peft_config (`PeftConfig`):
        The adapter config.
    model_config (`dict`):
        The transformers model config, that config should contain the `model_type` key.

Returns:
    peft_config (`PeftConfig`):
        The PEFT config with updated `target_modules`.

Raises:
    ValueError:
        Raises an error if the model type was not recognized.
rc   z0Please specify `target_modules` in `peft_config`)target_modulesr   getrf   set)r   rg   model_configr   s       rT   _prepare_adapter_config!BaseTuner._prepare_adapter_configH  sP    * %%-!77;;L<VWN% !STT),^)<K&rn   c                    g)a  
A private method to modify the model structure before adapter is applied.

See `peft.tuner.lora.LoraModel._prepare_model` for an example.

Args:
    peft_config (`PeftConfig`):
        The prepared adapter config.
    model (`nn.Module`):
        The model that is going to be adapted.
Nr   )r   rg   rh   s      rT   _prepare_modelBaseTuner._prepare_modeld  s     	rn   c                    [        X5      $ )a  
A helper method to check if the passed module's key name matches any of the target modules in the
adapter_config.

Args:
    config (`PeftConfig`):
        A config to match target modules from.
    key (`str`):
        A key to search any matches in config.

Returns:
    `bool` | `re.Match[str]` | `None`:
        True or re.Match object if key matches any target modules from config, False or None if no match found.
)check_target_module_exists)rg   keys     rT   _check_target_module_exists%BaseTuner._check_target_module_existsr  s      *+;;rn   c                    g)a  
Inplace replacement of the target module with the adapter layer. This method needs to be overridden by all the
tuner classes.

Check `peft.tuners.lora.LoraModel._create_and_replace` for an example.

Args:
    peft_config (`PeftConfig`):
        The adapter config.
    adapter_name (`str`):
        The adapter name.
    target (`nn.Module`):
        The target module.
    target_name (`str`):
        The target module's name.
    parent (`nn.Module`):
        The parent module.
    current_key (`str`):
        The key of the current target being adapted.
    parameter_name (`str`, *optional*)
        If, and only if, an `nn.Parameter` is being targeted, this is the name of the parameter.
Nr   )r   rg   r   targetri   parentcurrent_keyparameter_names           rT   _create_and_replaceBaseTuner._create_and_replace  s    B 	rn   c                H   UR                  5        H  u  p#U R                  U;  d  M  SUl        M      U R                   H  n[	        U R
                  U   SS5      nUS:X  a  M%  US:X  a*  UR                  5        H  u  p#SU;   d  M  SUl        M     MU  UR                  S5      (       ah  UR                  5        HR  n[        X`R                  5      (       d  M  [        US5      (       d  M2  UR                  c  MA  SUR                  l        MT     M  [        SU S	35      e   g)
zc
A helper method to mark only the adapter layers as trainable (i.e. module.requires_grad = False).
FbiasnoneallT_onlyNzRequested bias: z, is not implemented.)named_parametersr   requires_gradr   re   rg   endswithmodulesr4   r   r3   r   NotImplementedError)r   rh   npr   r   ms          rT    _mark_only_adapters_as_trainable*BaseTuner._mark_only_adapters_as_trainable  s     **,DA{{!#"' - #22N4++N;VVLDv~u}!224DA{*. 5 w''A!!%9%9::wq&?Q?QVWV\V\Vh/3, ) *,<TFBW*XYY 3rn   c                    U R                   R                  5        H1  n[        U[        [        45      (       d  M   UR                  U5        M3     g r   )rh   r   r4   BaseTunerLayerr   enable_adapters)r   enabledrK   s      rT   _set_adapter_layersBaseTuner._set_adapter_layers  s:    jj((*F&>3K"LMM&&w/ +rn   c                    U R                    HA  n[        U R                  U   SS5      nUS:w  d  M%  SU S3n[        R                  " U5        MC     U R                  SS9  g)z|
Disable all adapters in-place.

When disabling all adapters, the model output corresponds to the output of the base model.
r   r   z>Careful, disabling adapter layers with bias configured to be 'zL' does not produce the same output as the base model would without adaption.Fr   N)r   re   rg   r   r   r   )r   r   bias_valmsgs       rT   disable_adapter_layers BaseTuner.disable_adapter_layers  sq     #22Nt//?PH6!TU]T^ _L L  c" 3 	   /rn   c                "    U R                  SS9  g)z
Enable all adapters in-place
Tr   N)r   r   s    rT   enable_adapter_layersBaseTuner.enable_adapter_layers  s    
 	   .rn   c                    U[        U R                  R                  5       5      ;  a  [        SU S35      eU R                  U	 [	        U R
                  XR                  U R                  S9nU=(       d    / U l        g)z`
Deletes an existing adapter.

Args:
    adapter_name (str): Name of the adapter to be deleted.
Adapter z does not exist)rh   r   r   	layer_clsN)	r>   rg   r@   rf   delete_adapterrh   r   r   r   )r   r   new_adapters      rT   r   BaseTuner.delete_adapter  sq     tD$4$4$9$9$;<<x~_EFF\*$**<W[WkWk
 */Rrn   c                ,    [        U R                  XS9  g)"  
Enable or disable gradients on the given adapter(s).

Args:
    adapter_name (`str` or `Sequence[str]`):
        The name of the adapter(s) whose gradients should be enabled/disabled.
    requires_grad (`bool`, *optional*)
        Whether to enable (`True`, default) or disable (`False`).
adapter_namesr   N)set_requires_gradrh   )r   r   r   s      rT   r   BaseTuner.set_requires_grad  s     	$**M_rn   c                  ^ [        U R                  5      S::  a  g[        U4S jU R                  R                  5        5       5      (       d  [	        S5      eU R                  R                  5        Vs/ s H  n[        USS5      PM     nn[        S U 5       5      S:  a"  [	        U R                  R                   S35      egs  snf )	z
A helper method to check the config of a new adapter being added.

Raise a ValueError if there is something wrong with the config or if it conflicts with existing adapters.

r(   Nc              3  *   >#    U  H  oTL v   M
     g 7fr   r   ).0confrb   s     rT   	<genexpr>6BaseTuner._check_new_adapter_config.<locals>.<genexpr>  s     H.Gd6>.Gs   z_check_new_peft_config was called incorrectly, this should not happen. Please open an issue and report the error: https://github.com/huggingface/peft/issuesr   r   c              3  *   #    U  H	  oS :g  v   M     g7f)r   Nr   )r   
bias_values     rT   r  r    s     Bk
V#ks   zf supports only 1 adapter with bias. When using multiple adapters, set bias to 'none' for all adapters.)	lenrg   anyr;   rf   re   sumr   r   )r   rb   r  bias_valuess    `  rT   _check_new_adapter_config#BaseTuner._check_new_adapter_config  s     t A% Hd.>.>.E.E.GHHHO 
 BFAQAQAXAXAZ[AZwtVV4AZ[BkBBQF>>**+ ,7 7  G \s   5Cc                ,    [        U R                  XS9  g)a2  
A helper method to cast the adapter weights to the correct dtype.

Currently, this only upcasts float16 and bfloat16 to float32.

Args:
    adapter_name (`str`):
        The adapter name.
    autocast_adapter_dtype (`bool`, *optional*):
        Whether to autocast the adapter dtype. Defaults to `True`.

)r   autocast_adapter_dtypeN)cast_adapter_dtyperh   )r   r   r  s      rT   _cast_adapter_dtypeBaseTuner._cast_adapter_dtype  s     	4::Lprn   c                    [         R                  " S5      nU R                  U R                  5      nU(       a  [        R
                  " SU< S3U-   5        gg)zHelper method to check whether the adapter can be merged.

Raise a ValueError if it is not possible to merge the adapter with the given configuration.
a   
            ```python
            from transformers import AutoModelForCausalLM

            # Load original tied model
            model = AutoModelForCausalLM.from_pretrained("google/gemma-2-2b-it", tie_word_embeddings=False)

            # Set the randomly initialized lm_head to the previously tied embeddings
            model.lm_head.weight.data = model.model.embed_tokens.weight.data.clone()

            # Save the untied model
            untied_model_dir = "dir/for/untied/model"
            model.save_pretrained(untied_model_dir)
            model.config.save_pretrained(untied_model_dir)

            # Now use the original model but in untied format
            model = AutoModelForCausalLM.from_pretrained(untied_model_dir)
            ```
            BModel with `tie_word_embeddings=True` and the tied_target_modules=z are part of the adapter. This can lead to complications. You can opt to merge the adapter after cloning the weights (to untie the embeddings). You can untie the embeddings by loading the model with `tie_word_embeddings=False`. For example:N)textwrapdedent_get_tied_target_modulesrh   r   r   )r   example_codetied_target_moduless      rT   _check_merge_allowedBaseTuner._check_merge_allowed   sb    
  
* #;;DJJGMMUAT@V Ws s 	 rn   c                   U(       a  U R                  5         U R                  R                  5        VVs/ s H  u  pVU R                  U;  d  M  UPM     nnnSU(       a  SOS-   S-   n[	        Xr(       + US9 H  n [        U R                  U5      u  pn[        U
5         [        U
S5      (       a#  U
R                  XUS9nU R                  XX5        OH[        U
S5      (       a7  U(       a  U
R                  X4S	9  U R                  XU
R                  5       U
5        S S S 5        M     U R                  $ s  snnf ! [         a     M  f = f! , (       d  f       M  = f)
Nz
Unloading zand merging r+   rh   )disabledesc"unload_and_optionally_merge_module)merge
safe_merger   r,   )r  r   )r  rh   r2   r   r   r'   AttributeErrorrU   r3   r  _replace_moduler  get_base_layer)r   r  progressbarr  r   r   _key_listr  r   r   ri   unloaded_modules                rT   _unload_and_optionally_merge&BaseTuner._unload_and_optionally_mergeD  s9    %%'&*jj&>&>&@[&@FCDKKWZDZC&@[~B?'I/EC.=djj#.N+ f%6#GHH&,&O&O#- 'P 'O ((oVV\22
X((f>S>S>UW]^ &% F" zz' \
 " %%s*   D5D5<D; A=E;
E	E	
E	c                "    U R                  XUS9$ )a@  
This method merges the adapter layers into the base model.

This is needed if someone wants to use the base model as a standalone model. The returned model has the same
architecture as the original base model.

It is important to assign the returned model to a variable and use it, this is not an in-place operation!

Args:
    progressbar (`bool`):
        whether to show a progressbar indicating the unload and merge process (default: False).
    safe_merge (`bool`):
        whether to activate the safe merging check to check if there is any potential Nan in the adapter
        weights.
    adapter_names (`List[str]`, *optional*):
        The list of adapter names that should be merged. If None, all active adapters will be merged. Defaults
        to `None`.

Example:

```py
>>> from transformers import AutoModelForCausalLM
>>> from peft import PeftModel

>>> model_id = ...
>>> base_model = AutoModelForCausalLM.from_pretrained(model_id)
>>> peft_model_id = ...
>>> model = PeftModel.from_pretrained(base_model, peft_model_id)
>>> merged_model = model.merge_and_unload()
```
)r#  r  r   r'  )r   r#  r  r   s       rT   merge_and_unloadBaseTuner.merge_and_unloadc  s"    D 00#- 1 
 	
rn   c                     U R                  SS9$ )z
Return the base model by removing all the PEFT modules.

It is important to assign the returned model to a variable and use it, this is not an in-place operation!
F)r  r*  r   s    rT   unloadBaseTuner.unload  s     00u0==rn   c                    [        XU5        g)rW   N)rm   )r   rg   rh   ri   s       rT   !_check_target_module_compatiblity+BaseTuner._check_target_module_compatiblity  s     	)[Irn   c                F    [        U R                  R                   S35      e)Nz) does not support targeting nn.Parameter.)r   r   r   )r   rg   r   r   ri   r   r   s          rT   _create_and_replace_parameter'BaseTuner._create_and_replace_parameter  s"     "T^^%<%<$==f"ghhrn   c                &   U R                   U   n/ n/ n/ n	U R                  U5        U R                  X5        U R                  U5      n
U R	                  Xj5      nU R                  Xa5        [        US/ 5      (       a  U(       a  [        S5      e[        UR                  5       5      nU VVs/ s H  u  pUPM	     nnn[        USS5      [        :H  nU(       a  / n/ n[        Xa5      n[        UR                  [        [        45      (       a  [        UR                  5      [         :  a  UR"                  [$        R&                  :w  a  [)        S UR                   5       5      nU Vs/ s H/  nUUR                  ;  d  M  UR+                  U5      (       a  M-  UPM1     nn[-        UR                  U5      n[        U5      [        UR                  5      :  a  UUl        / nU H1  u  nn[        U[.        5      (       d  M  UR1                  US-   5        M3     [        5       nUb;  [2        UR"                     nU Vs1 s H  nUR5                  SU-   S5      S   iM     nnU GH  u  nnU(       d  M  U H,  nUR7                  U5      (       d  M  UR1                  U5          O   U(       a  US	   U:X  a  MT  Uc  U R9                  Xl5      n[        U[:        5      (       a  UR1                  U5        M  U(       d  UR1                  U5        M  U R<                  R1                  U5        [?        X5      u  nnnU RA                  XaU5        U(       a  [B        O[D        nU" 5          U RG                  XbUUUUS
9  SSS5        GM  UU;  a  UR1                  U5        OrU R<                  R1                  U5        [?        X5      u  nnnU RA                  XaU5        U(       a  [B        O[D        nU" 5          U RG                  XbUUUUS
9  SSS5        U R9                  Xl5      (       d  GM  U	R1                  U5        GM     [        US/ 5      (       a  U RI                  XaX$S9  Ub  [        U	5      n[        U R<                  5      n UU -
  n!U U-
  n"Sn#U!(       d  U"(       a  Sn#U!(       a  U#S[K        U!5       S3-  n#U"(       a  U#S[K        U"5       S3-  n#U#(       a  [L        RN                  " U#[P        5        U R<                  (       Gd  U RR                  (       Gd  U(       d  U(       a  U(       d  [        S5      eU(       d#  U(       a  UR                  (       d  [        S5      eU(       dd  U(       a]  SUR                   S3n$[        USS5      b  U$SURT                   S3-  n$[        USS5      b  U$SURV                   S3-  n$[        U$5      eSn$[        USS5      b  U$SURT                   S3-  n$[        USS5      b  U$SURV                   S3-  n$[        U$5      e[Y        US5      (       a=  URZ                  (       a,  U(       d%  [L        RN                  " SURZ                   S35        OU(       d  UR                  (       a;  U R<                  (       d*  [L        RN                  " SUR                   S3[P        5        OL[        US/ 5      (       a:  U RR                  (       d)  [L        RN                  " SUR\                   S 3[P        5        U R_                  US!9n%U%(       a  [L        RN                  " S"U%< S#35        U Ra                  U Rb                  URd                  S$9  U Rg                  U5        U R                   U   Rd                  (       a)  URi                  5        H  u  n&n'UU&;   d  M  S%U'l5        M     [m        UU[n        R                  U 5      UX Rb                  ;   S&9  gs  snnf s  snf s  snf ! , (       d  f       GM  = f! , (       d  f       GN= f)'a  
Creates adapter layers and replaces the target modules with the adapter layers. This method is called under the
hood by `peft.mapping.get_peft_model` if a non-prompt tuning adapter class is passed.

The corresponding PEFT config is directly retrieved from the `peft_config` attribute of the BaseTuner class.

Args:
    model (`nn.Module`):
        The model to be tuned.
    adapter_name (`str`):
        The adapter name.
    autocast_adapter_dtype (`bool`, *optional*):
        Whether to autocast the adapter dtype. Defaults to `True`.
    low_cpu_mem_usage (`bool`, `optional`, defaults to `False`):
        Create empty adapter weights on meta device. Useful to speed up the loading process.
    state_dict (`dict`, *optional*, defaults to `None`)
        If a state_dict is passed here, the adapters will be injected based on the entries of the state_dict.
        This can be useful when the exact `target_modules` of the PEFT method is unknown, for instance because
        the checkpoint was created without meta data. Note that the values from the state_dict are not used,
        only the keys are used to determine the correct layers that should be adapted.

target_parameterszTrying to inject a PEFT adapter from a state_dict but the PEFT config uses `target_parameters`. This is not supported -- when using `target_parameters`, please inject the adapter without the state_dict.r   Nc              3  ,   #    U  H
  nS U-   v   M     g7f.Nr   )r   suffixs     rT   r  +BaseTuner.inject_adapter.<locals>.<genexpr>  s     S8RfS6\8Rs   r:  r(   r   )r   )rg   rh   r   r   r+   zWhile injecting the PEFT adapters, an inconsistency was discovered between the PEFT config and the provided state_dict. This is not necessarily an issue and can be ignored if this was the intent. z;The PEFT config contained these additional target modules: z. z:The state_dict contained these additional target modules: zAll modules were excluded. This is likely unintended. Check your `target_modules`, `exclude_modules` and `modules_to_save` configuration.znNo `target_modules` passed but also no `target_parameters` found. Please check the values for these arguments.zTarget modules zL not found in the base model. Please check the target modules and try again.layers_to_transformz, Note: You specified 'layers_to_transform': layers_patternz& You also specified 'layers_pattern': a"  No modules were targeted for adaptation. This might be caused by a combination of mismatched target modules and excluded modules. Please check your `target_modules` and `exclude_modules` configuration. You may also have only targeted modules that are marked to be saved (`modules_to_save`).exclude_modulesz You have passed exclude_modules=zS but no modules were excluded. Please check that exclude_modules was set correctly.ztarget_modules=z$ were set but no module was matched.ztarget_parameters=z' were set but no parameter was matched.)rh   r  z are part of the adapter. This can lead to complications, for example when merging the adapter or converting your model to formats other than safetensors. See for example https://github.com/huggingface/peft/issues/2018.inference_modeF)rh   rg   r   r   activate_adapter)8rg   r
  _check_tied_modulesget_model_configr   r   re   rf   r>   r2   r    _maybe_include_all_linear_layersr4   r   r   r  r   rd   r"   IA3tupler   _find_minimal_target_modulesr   r7   r   rsplit
startswithr   _ExcludedModuler   r'   r1  r   r   r   _inject_parameterssortedr   r   RuntimeWarningr   r>  r?  r3   r@  r7  r  set_adapterr   rB  r   r   r   r!   r   )(r   rh   r   r  r   r   rg   excluded_modulesunmatched_modules!targeted_modules_from_peft_configr   r2   r   r$  r%  uses_dummy_target_modulessuffixesrJ   names_no_targetnew_target_modulesexisting_adapter_prefixesrK   module_namesr   kadapter_keyresultr   r   ri   ctxtargeted_set_from_peft_configtargeted_set_from_state_dictdiff_peft_configdiff_state_dictwarning_msg	error_msgr  r   r   s(                                           rT   r   BaseTuner.inject_adapter  s   D &&|479) 	&&{3  4,,U322;MK/; 3R88Zx 
 U0023&34mFCCm4$+K9I4$PTh$h!$MH 7{J {11D#;??[//04WW&&(,,6S8R8RSSH!)!)d+:T:T.T^b^k^klt^u   ">k>X>XZi!j%&[-G-G)HH-?* %'!(KC&.11)00s; )
 "%!01F1FGFBLM*QAHHS6\15a8*LM(KC  9>>+..$++C0  9
  $4R$8C$?!99+Kfo66$++C0%,,S1..55c:2A%2M/FFK::;{[0A,{C00'v{F`c 1   l*%,,S1..55c:2A%2M/FFK::;{[0A,{C00'v{F`c 1   33KEE5<<SA] )` ; 3R88##'< $  ! -00Q,R)+.t/I/I+J(<?[[:=ZZOK? 
  QRXYiRjQkkmn ![\bcr\s[ttvwwk>:)))$2O2O2OXq(9 j  &*;KD^D^ '  &*; &k&@&@%A BE F  ;(=tDP#OP[PoPoOppq!rrI;(8$?K#I+JdJdIeef!ggI ++]  ;(=tDP#OP[PoPoOppq!rrI;(8$?K#I+JdJdIeef!ggI ++["3449T9T]mMM2;3N3N2O PG G
 +
 ))$2L2L%k&@&@%AAefhv &92>>tGdGd()F)F(GGno"
 #;;%;HMMUAT@V WS S 	--k>X>XY--e4L)88..011$&+AO 1 	)#"33D9%)-A-AA	
o 5:( N8  s6   -_(_$ _$_$<"_)"_./`.
_>	
`	c                R  ^ ^^^^^^ S mUUUUU U4S jn[        TR                  5      n[        U5      nTR                  5        H  u  p[	        U	S5      (       aZ  U HR  mTR                  S5      u  pnX:w  a  M  [        XS5      c  M-  U" UTU5        T R                  R                  T5        MT     Mp  T" U5      nU	R                  SS9 HS  u  pU SU 3mTU;   d  [        U4S jU 5       5      (       d  M.  U" UTU5        T R                  R                  T5        MU     M     g)	z1Inject layers based on peft_config.target_modulesc                R    SnX;   a  U R                  U5      u  p#nX$-   n X;   a  M  U $ )Nz.base_layer)
rpartition)rM   rJ   r   r$  r;  s        rT   strip_base_layer_from_name@BaseTuner._inject_parameters.<locals>.strip_base_layer_from_name  s;    
 !D%$/$:$:4$@!6$o % rn   c                  > [        TU 5      u  p4nT" U 5      nTR                  U5      n[        U[        5      (       a?  UR                  R
                  S:w  a%  [        SU S[        U5      R
                   S35      eTR                  TTU5        T
(       a  [        O[        nU" 5          TR                  TT	UUUUUR                  S5      S   S9  S S S 5        g ! , (       d  f       g = f)NParamWrapperz+Trying to wrap an `nn.Parameter` of layer 'z
' of type z, which is not a valid target. Make sure that this layer is not also targeted with `target_modules`. For some models, PEFT will do this automatically, try setting `target_modules=[]` to prevent it.r:  r=  )r   r   )r'   get_submoduler4   r   r   r   rf   r   r1  r   r   r   rg  )rM   r   
param_namer   r   ri   unwrapped_module_nameunwrapped_moduler]  r   r   rh   rg   r   rh  s            rT   create_and_replace_param>BaseTuner._inject_parameters.<locals>.create_and_replace_param  s    *9%*M'FK$>{$K!$223HI*N;;@P@Z@Z@c@cgu@u ABWAXXbF|,,- .EE  22;{S(9${C((  ##-#8#8#=b#A )  s   +(C
C*parametrizationsr:  NFrecursec              3  L   >#    U  H  nTR                  S U 35      v   M     g7fr9  r   r   
target_keyr   s     rT   r  /BaseTuner._inject_parameters.<locals>.<genexpr>  s'     3reqWaCLL1ZLAQ4R4Req   !$)r   r7  rN  r2   r3   rg  re   r   r7   r   r  )r   rg   rh   r   r   rp  unsorted_target_namestarget_namesrM   rK   target_module_namer$  rm  rn  rS   r   rh  s   `````          @@rT   rM  BaseTuner._inject_parameters  s%   
			 	8 !$K$A$A B 34#(#6#6#8Kv122 (C8;s8K5&:)8 v48@ ,[#zJ1188= ( )C;(O%)/)@)@)@)O%J231ZLAC|+3req3r0r0r 1c:N55<<SA *P' $9rn   c                  ^ [        XU5        [        US5      (       a  UR                  n[        US5      (       d3  UR                  Ul        [        US5      (       a  UR                  Ul        [        USS5      bc  [        US5      (       a  UR                  UR                  l        OUR                  Ul        UR                  UR                  R                  5        [        R                  " S5      mUR                  5        H  u  pVU R                  U;   d  M  [        US5      (       a  UR                  nOp[        US5      (       a  UR                  nOR[        US5      (       a  UR                  nO4[        US	S5      b  UR                  nO[        UR!                  5       5      n[#        U4S
 jUR!                  5        5       5      (       a  M  UR                  UR                  5        M     g)a  
Replace the sub-module of a given moduel with a new PEFT module.

This also deals with device placement of the new module to be in line with the child module.

Args:
    parent (`nn.Module`):
        The parent module on which the replacement should take place.
    child_name (`str`):
        The name of the child module to be replaced.
    new_module (`nn.Module`):
        The new PEFT module.
    child (`nn.Module`):
        The original child module that is being replaced.

r,   r   stateNr.   qweightr{   r   in_proj_weightc              3  @   >#    U  H  oR                   T:H  v   M     g 7fr   r9   r   r   r.   s     rT   r  ,BaseTuner._replace_module.<locals>.<genexpr>4  s     I5H88t+5H   )setattrr3   r,   r   r   re   r  rG   r9   r8   r2   r   r  r{   r  next
parametersr  )	r   r   
child_name
new_modulechildrJ   rK   r   r.   s	           @rT   r!  BaseTuner._replace_module   ss   " 	J/
 5,''$$Ez<00 %Juf%%"'**
5'4(4z<00.3kk
%%+#(;;
 MM%,,--.||F#&446LD{{d"5),,"]]FUE**"YYFUH--"\\FU$4d;G"11F!%"2"2"45FIV5F5F5HIIIIIfmm, 7rn   c                    U R                  5         U R                  R                  5        H=  n[        U[        5      (       d  M  [        U5         UR                  XS9  SSS5        M?     g! , (       d  f       MQ  = f)a  
This method merges the adapter layers into the base model.

Merging adapters can lead to a speed up of the forward pass. A copy of the adapter weights is still kept in
memory, which is required to unmerge the adapters. In order to merge the adapter weights without keeping them
in memory, please call `merge_and_unload`.

Args:
    adapter_names (`list[str]`, *optional*):
        The list of adapter names that should be merged. If `None`, all active adapters will be merged.
        Defaults to `None`.
    safe_merge (`bool`, *optional*):
        If `True`, the merge operation will be performed in a copy of the original weights and check for NaNs
        before merging the weights. This is useful if you want to check if the merge operation will produce
        NaNs. Defaults to `False`.
)r   r  N)r  rh   r   r4   r   rU   r  )r   r   r  rK   s       rT   merge_adapterBaseTuner.merge_adapter7  sY    . 	!!#jj((*F&.11!&)LL}LT *) +))s   A--
A<	c                    U R                   R                  5        H>  n[        U[        5      (       d  M  [	        U5         UR                  5         SSS5        M@     g! , (       d  f       MR  = f)zE
This method unmerges all merged adapter layers from the base model.
N)rh   r   r4   r   rU   unmerge)r   rK   s     rT   unmerge_adapterBaseTuner.unmerge_adapterT  sI     jj((*F&.11!&)NN$ *) +))s    A
A-	c                    [        XUS9  g)a  
Sets the active adapter(s) on auxiliary modules.

If the subclass (e.g. `LoraModel`) supports auxiliary modules like `modules_to_save`, it should call this
method in `set_adapter` to ensure that those auxiliary modules are being set correctly.

Args:
    adapter_name (`str` or `list[str]`):
        The name(s) of the adapter(s) to be set as active. The adapters must be loaded first.
    inference_mode (bool, optional):
         Whether the activated adapter should be frozen (i.e. `requires_grad=False`). Default is False.
rA  N)r   r   r   rB  s      rT   set_auxiliary_adapters BaseTuner.set_auxiliary_adapters]  s     	TGrn   c                N    [        U R                  XU R                  S9  Xl        g)a	  Set the active adapter(s).

Args:
    adapter_name (str, list[str]):
        The name(s) of the adapter(s) to set as active
    inference_mode (bool, optional):
         Whether the activated adapter should be frozen (i.e. `requires_grad=False`). Default is False.
)r   rB  r   N)rP  rh   r   r   r  s      rT   rP  BaseTuner.set_adapterl  s&     	JJ\\`\p\p	
 +rn   c                    [        U S[        5      n[        US5      (       a  UR                  5       nU$ [        R
                  " U5      (       a  [        R                  " U5      nU$ )a<  
This method gets the config from a model in dictionary form. If model has not attribute config, then this
method returns a default config.

Args:
    model (`nn.Module`):
        Model to get the config from.
    default (`dict|None`, *optional*)::
        What to return if model does not have a config attribute.
rb   to_dict)re   r   r3   r  dataclassesis_dataclassasdict)rh   r   s     rT   rE  BaseTuner.get_model_configz  s_     uh0BC<++'//1L  %%l33&--l;Lrn   c                    / nU R                  U5      nUR                  S5      (       aB  U R                   H2  nUR                  S5      S   [        ;   d  M!  UR                  U5        M4     U$ )Ntie_word_embeddingsr:  r=  )rE  r   r   rC   r   r7   )r   rh   r  r   target_modules        rT   r  "BaseTuner._get_tied_target_modules  sk     ,,U3122!%!;!; !&&s+B/3HH'..}= "< #"rn   c                    [        U 5      $ r   )r   r   s    rT   r   /BaseTuner._get_module_names_tied_with_embedding  s    4T::rn   c                4    Sn[         R                  " U5        g)z
This method adds modules to tie to `peft_config` so that those modules can be tied downstream. By default this
method raises a warning, and each tuner class extending `BaseTuner` can choose to implement this.
K  Model has `tie_word_embeddings=True` and a tied layer is part of the adapter, but no implementation exists to tie the adapters. This can lead to complications, for example when merging the adapter or converting your model to formats other than safetensors. Check the discussion here: https://github.com/huggingface/peft/issues/2777N)r   r   )r   rg   tied_weight_keysr   s       rT   _add_modules_to_tieBaseTuner._add_modules_to_tie  s    Y 	 	crn   c                   [        [        US/ 5      =(       d    / 5      n[        S U 5       5      nU R                  5       n[        USS5      (       ad  U(       a  U(       a  U R	                  X%5        g	U(       d  U(       a  [
        R                  " S5        g	U(       d  [
        R                  " S5        g	g	U(       aK  U(       aC  [        US5      (       a  Sn[
        R                  " U5        g	Sn[
        R                  " U5        g	g	g	)
z
Checks if any of the tied layers are targetted via `modules_to_save`. Updates the `peft_config.modules_to_tie`
with any layers that needs to be tied
modules_to_savec              3  2   #    U  H  o[         ;   v   M     g 7fr   )r   )r   r   s     rT   r  0BaseTuner._check_tied_modules.<locals>.<genexpr>  s     "W!(=#=s   ensure_weight_tyingFz\You have requested `ensure_weight_tying`, but no tied modules are added in `modules_to_save`zUYou have requested `ensure_weight_tying`, but no tied modules were found in the modelaG  Model has `tie_word_embeddings=True` and a tied layer is part of the adapter, but `ensure_weight_tying` is not set to True. This can lead to complications, for example when merging the adapter or converting your model to formats other than safetensors. Check the discussion here: https://github.com/huggingface/peft/issues/2777r  N)r   re   r  r   r  r   r   r3   )r   rh   rg   r  is_embedding_to_saver  r   s          rT   rD  BaseTuner._check_tied_modules  s    
 gk3DbIORP""W"WWEEG; 5u==#(8((G).>r &uv & "&6{$9::a  c"a  c"% '7!rn   c                ~   >  [         TU ]  U5      $ ! [         a     US:X  a  e [        U R                  U5      s $ f = f)z1Forward missing attributes to the wrapped module.rh   )r   __getattr__r   re   rh   )r   rJ   r   s     rT   r  BaseTuner.__getattr__  sB    	-7&t,, 	-w4::t,,	-s    '<<)r   rh   rg   r   r   FN)
rg   z(Union[PeftConfig, dict[str, PeftConfig]]r   r   r   boolr   !Optional[dict[str, torch.Tensor]]returnNoner  	list[str])r   r	   r   r	   )rh   	nn.Modulerb   r&   r   r   r  r  )rg   r&   r   r?   r  r&   )rg   r&   rh   r  )rg   r&   r   r   r  bool | re.Match[str] | Noner   )rg   r&   r   r   r   r  ri   r   r   r  r   r   r   zOptional[str]r  r  )rh   r  r  r  Tr   r  r  r  r  r  r   r   r  r  r   zstr | Sequence[str]r   r  r  r  )rb   r&   r  r  )r   r   r  r  r  r  )TFFN)
r  r  r#  r  r  r  r   Optional[list[str]]r  r  )FFN)r#  r  r  r  r   r  r  torch.nn.Module)r  r  rg   r&   rh   r  ri   r   )TFN)rh   r  r   r   r  r  r   r  r   r  r  r  )
rg   r&   rh   r  r   r   r   r  r  r  NF)r   r  r  r  r  r  )r   str | list[str]rB  r  r  r  F)rh   r  r  r?   )rh   r  r  r  )rh   r  )rJ   r   ).r   
__module____qualname____firstlineno____doc____annotations__r   propertyr   r   r   r   r   staticmethodr   r   r   r   r   r   r   r   r   r
  r  r  r'  r+  r.  r1  r4  r   rM  r!  r  r  r  rP  rE  r  r   r  rD  r  __static_attributes____classcell__)r   s   @rT   r   r      s   $R K)) 0/ #(8<#2 >#2 	#2
  #2 6#2 
#2 #2J # #38 < <"  )-     	 
       &  
   DZ00
0"/0 
`2q"L ! -1  	
 + 
@ im$
$
59$
Re$
	$
L>Ji	i (,"'8<W
W
 W
 !%	W

  W
 6W
 
W
rIB%IB.7IBGJIB_cIB	IBV5-nU:%H+  $
#;(#T- -rn   r   c                     \ rS rSr% SrSrS\S'   SrS\S'   SrS\S	'   S
r	S\S'   / r
S\S'   S%S jrS r\S&S j5       r\S&S j5       rS'S(S jjrS)S jr\S*S j5       r\S*S j5       r\S+S j5       rS,S jr\S 5       rS-S jrS.S/S jjrS0S jrS1S jrS2S3S jjrS4S5S  jjr\S6S! j5       r\S7S" j5       rS8S# jrS$rg)9r   i  a/  
A tuner layer mixin that provides the common methods and attributes for all tuners.

Args:
    is_pluggable (`bool`, *optional*):
        Whether the adapter layer can be plugged to any pytorch module
    active_adapters (Union[List[`str`], `str`], *optional*):
        The name of the active adapter.
r   ztuple[str, ...]adapter_layer_namesother_param_namesFr  _disable_adaptersdefaultr  _active_adapterr  merged_adaptersc                j    U n[        US5      (       a  UR                  n[        US5      (       a  M  U$ )zt
(Recursively) get the base_layer.

This is necessary for the case that the tuner layer wraps another tuner layer.

r,   )r3   r,   r   r,   s     rT   r"  BaseTunerLayer.get_base_layer  s6     
j,//#..J j,//rn   c                   U R                  5       n[        US5      (       d  gUR                  n[        U[        [
        45      (       a=  [        R                  " X!R                  R                  UR                  R                  S9$ [        U[        R                  5      (       a@  UR                  5       S:X  a  U$ [        R                  " SUR                   S3[         5        gg)a  
Extract embed_scale from base layer if present and valid.

Some embedding layers (e.g., Gemma3TextScaledWordEmbedding) apply scaling to embeddings in their forward
method. This method checks for the presence of an `embed_scale` attribute. If it exists, it is assumed to be a
scalar. Its shape is validated accordingly.

Returns:
    torch.Tensor or None: The embed_scale tensor if found and valid, None otherwise.
embed_scaleN)r9   dtyper(   z'Found embed_scale attribute with shape z, expected scalar. Embedding scaling will not be applied. If this is unexpected, please open an issue at https://github.com/huggingface/peft/issues)r"  r3   r  r4   intfloatr8   rF   r   r9   r  Tensornumelr   r   r   r$   )r   r,   r  s      rT   _get_embed_scaleBaseTunerLayer._get_embed_scale  s     ((*
z=11 ,, kC<00<<4E4E4L4LT^TeTeTkTkll k5<<00  "a'"" =k>O>O=P QA A  	 rn   c                |    U R                  5       n[        US5      (       a  UR                  nU$ UR                  nU$ )Nr  )r"  r3   r  r   )r   r,   r   s      rT   r   BaseTunerLayer.weight(  sC     ((*
:y))''F   &&Frn   c                :    U R                  5       nUR                  $ r   )r"  r   r  s     rT   r   BaseTunerLayer.bias8  s    ((*
rn   Nc                    [         er   r   )r   r  r   s      rT   r  BaseTunerLayer.merge=      !!rn   c                    [         er   r  r   s    rT   r  BaseTunerLayer.unmerge@  r  rn   c                ,    [        U R                  5      $ r   )r  r  r   s    rT   mergedBaseTunerLayer.mergedC  s    D(())rn   c                    U R                   $ r   )r  r   s    rT   disable_adaptersBaseTunerLayer.disable_adaptersG  s     %%%rn   c                    U R                   $ r   )r  r   s    rT   r   BaseTunerLayer.active_adapterL  s     ###rn   c                   [        5       nU R                   Hg  n[        X5      n[        U[        R
                  [        R                  45      (       d  M?  UR                  [        UR                  5       5      5        Mi     U$ )z:Return all adapter names that can be found on this module.)	r   r  re   r4   r   
ModuleDictParameterDictr   r@   )r   adapters
layer_namerK   s       rT   _get_available_adapters&BaseTunerLayer._get_available_adaptersQ  s`    522JT.Ffr}}b6F6F&GHHOOC./	 3
 rn   c                r    [        U R                  [        5      (       a  U R                  /$ U R                  $ r   r   r   s    rT   r   BaseTunerLayer.active_adapters[  r   rn   c                    U(       a#  U R                  U R                  5        SU l        gU R                   H  n[	        X5      nUR                  S5        M!     SU l        g)zToggle the enabling and disabling of adapters

Takes care of setting the requires_grad flag for the adapter weights.

Args:
    enabled (bool): True to enable adapters, False to disable adapters
FTN)rP  r   r  r  re   requires_grad_)r   r   r  rH   s       rT   r   BaseTunerLayer.enable_adaptersb  sW     T112%*D" #66
1$$U+ 7 &*D"rn   c                   [        U[        5      (       a  U/nU R                   HW  n[        X5      nUR	                  5        H5  u  pVXQ;   a  U(       d  UR                  S5        M$  UR                  S5        M7     MY     Xl        g)a  Set the active adapter(s).

Additionally, this function will set the specified adapter to trainable (i.e., requires_grad=True) unless
inference_mode is True.

Args:
    adapter_name (`str` or `list[str]`):
         The name(s) of the adapter(s) to set as active.
    inference_mode (bool, optional):
         Whether the activated adapter should be frozen (i.e. `requires_grad=False`). Default is False.
TFN)r4   r   r  re   itemsr  r  )r   r   rB  r  module_dictr   rH   s          rT   rP  BaseTunerLayer.set_adaptert  sw     mS))*OM 22J!$3K)//1
(> ((.((/ 2 3  -rn   c                    [        5       nU R                  U R                  -    H@  n[        X5      n[	        US5      (       d  M!  UR                  UR                  5       5        MB     [        U5      $ )z3Return a sorted list of all available adapter namesr@   )r   r  r  re   r3   r   r@   rN  )r   r   rJ   attrs       rT   _all_available_adapter_names+BaseTunerLayer._all_available_adapter_names  s_    ,,t/E/EED 4&DtV$$$$TYY[1 F m$$rn   c                   U R                   U R                  -    H   nU[        X5      ;   d  M  [        X5      U	 M"     XR                  ;   a  U R                  SS nUR	                  U5        U(       a  U R                  U5        gU R                  5       nU(       d  U R                  / 5        gUS   n[        R                  " SU SU S35        U R                  US   5        gg)a  
Delete an adapter from the layer

This should be called on all adapter layers, or else we will get an inconsistent state.

This method will also set a new active adapter if the deleted adapter was an active adapter. It is important
that the new adapter is chosen in a deterministic way, so that the same adapter is chosen on all layers.

Args:
    adapter_name (`str`): The name of the adapter to delete

Nr   r   z< was active which is now deleted. Setting active adapter to r:  )	r  r  re   r   removerP  r  r   r   )r   r   r
  r   remaining_adaptersnew_active_adapters         rT   r   BaseTunerLayer.delete_adapter  s     ,,t/E/EEDwt22D'5 F ///"2215O""<0  1 &*%F%F%H")$$R();A)>&MM"<.0l-.a1 $$%7%:;% 0rn   c                    [        U[        5      (       a  U1nO[        U5      nU R                   H?  n[	        X5      nUR                  5        H  u  pgXc;   d  M  UR                  U5        M     MA     g)r   N)r4   r   r   r  re   r  r  )r   r   r   adapter_names_setr  r  r   rH   s           rT   r    BaseTunerLayer.set_requires_grad  sg     mS))!. #M 222J!$3K)//1
+((7 2 3rn   c                  ^	 Ucn  U R                  5       n[        U[        R                  5      (       a  UR                  nS H,  n[        X4S5      nUc  M  UR                  nUR                  n  O   g[        R                  " S5      m	U R                  U R                  -    H  n[        XS5      n[        U[        R                  [        R                  [        45      (       d  ME  X;  a  ML  [        U	4S jUR                  5        5       5      (       a  Mv  WR                  R                   (       d  UR                  R"                  (       a  X   R%                  UWS9X'   M  X   R%                  U5      X'   M     g)zE
Move the adapter of the given name to the device of the base layer.
N)r   r  r.   c              3  @   >#    U  H  oR                   T:H  v   M     g 7fr   r  r  s     rT   r  GBaseTunerLayer._move_adapter_to_device_of_base_layer.<locals>.<genexpr>  s     H-G88t#-Gr  r  )r"  r4   r   r   r\   re   r9   r  r8   r  r  r  r  r)   r  r  is_floating_point
is_complexrG   )
r   r   r9   r,   weight_namer   r  adapter_layer_nameadapter_layerr.   s
            @rT   %_move_adapter_to_device_of_base_layer4BaseTunerLayer._move_adapter_to_device_of_base_layer  s9    >,,.J*b&;&;<<'00
4 $?%#]]F"LLE  5 ||F#
 #'":":T=S=S"S#DdCMmbmmR=M=Mz-Z[[0H]-E-E-GHHH ||--1H1H.;.I.L.LV[`.L.a+.;.I.L.LV.T+ #Trn   c                    g r   r   r   xr  s      rT   _cast_input_dtype BaseTunerLayer._cast_input_dtype  s    FIrn   c                    g r   r   r!  s      rT   r#  r$    s    VYrn   c                t    Uc  g[        U SS5      nU(       a  UR                  U:X  a  U$ UR                  US9$ )aT  
Whether to cast the dtype of the input of the forward method.

Usually, we want to enable this to align the input dtype with the dtype of the weight, but by setting
layer.cast_input_dtype=False, this can be disabled if necessary.

Enabling or disabling can be managed via the peft.helpers.disable_lora_input_dtype_casting context manager.
Ncast_input_dtype_enabledTr  )re   r  rG   )r   r"  r  r'  s       rT   r#  r$    s@     9#*41KT#R (agg.>Htt%t  rn   )r  r  )r  r  )r  torch.Tensorr  )r  r  r   r  r  r  r  )r  r  )r  r  )r  set[str]r  r  )r   r  rB  r  r  r  r  r  r  r  r   )r   r   r9   zOptional[torch.device]r  r  )r"  r  r  torch.dtyper  r  )r"  r(  r  r*  r  r(  )r  r*  ) r   r  r  r  r  r  r  r  r  r  r  r"  r  r  r   r   r  r  r  r  r   r  r   r   rP  r  r   r   r  r   r#  r  r   rn   rT   r   r     s    ,.-)++ $t# (1O_0 "$OY#
#J    "" * * & & $ $ # #*$-8	%#<J8*%UN I IY Y!rn   r   c                ~  ^ [        U [        5      (       d  U (       d  [        S5      e[        U 5      n SU ;   a  [        S5      e[        U5      nU R	                  U5      (       d  Sn[        U5      eS nU VVs1 s H  oC" U5        H  oUiM     M     nnnU  Vs0 s H  oDU" U5      _M     nn[        5       n[        UR                  5       S S9 HI  u  mn	U	 H=  nXX;   d  XV;   a  M  [        U4S jU 5       5      (       a  M+  UR                  U5          MG     MK     U(       d  [        U 5      $ U$ s  snnf s  snf )	a  Find the minimal set of target modules that is sufficient to separate them from the other modules.

Sometimes, a very large list of target_modules could be passed, which can slow down loading of adapters (e.g. when
loaded from diffusers). It may be possible to condense this list from hundreds of items to just a handful of
suffixes that are sufficient to distinguish the target modules from the other modules.

Example:
    ```py
    >>> from peft.tuners.tuners_utils import _find_minimal_target_modules

    >>> target_modules = [f"model.decoder.layers.{i}.self_attn.q_proj" for i in range(100)]
    >>> target_modules += [f"model.decoder.layers.{i}.self_attn.v_proj" for i in range(100)]
    >>> other_module_names = [f"model.encoder.layers.{i}.self_attn.k_proj" for i in range(100)]
    >>> _find_minimal_target_modules(target_modules, other_module_names)
    {"q_proj", "v_proj"}
    ```

Args:
    target_modules (`list[str]` | `set[str]`):
        The list of target modules.
    other_module_names (`list[str]` | `set[str]`):
        The list of other module names. They must not overlap with the target modules.

Returns:
    `set[str]`:
        The minimal set of target modules that is sufficient to separate them from the other modules.

Raises:
    ValueError:
        If `target_modules` is not a list or set of strings or if it contains an empty string. Also raises an error
        if `target_modules` and `other_module_names` contain common elements.
z2target_modules should be a list or set of strings.r+   z2target_modules should not contain an empty string.ztarget_modules and other_module_names contain common elements, this should not happen, please open a GitHub issue at https://github.com/huggingface/peft/issues with the code to reproduce this issuec                    U R                  S5      n[        [        U5      5       Vs/ s H  nSR                  XS  5      PM     snS S S2   $ s  snf )Nr:  r=  )rC   ranger  rD   )spartsrP   s      rT   generate_suffixes7_find_minimal_target_modules.<locals>.generate_suffixesG  sH    -23u:->?->r#->?"EE?s   Ac                    U S   $ )Nr(   r   )tups    rT   <lambda>._find_minimal_target_modules.<locals>.<lambda>V  s    TWXYTZrn   )r   c              3  L   >#    U  H  nTR                  S U-   5      v   M     g7fr9  rv  )r   
req_suffixitems     rT   r  /_find_minimal_target_modules.<locals>.<genexpr>]  s$     [IZ:t}}S:%566IZrz  )	r4   r   rf   r   
isdisjointrN  r  r  add)
r   other_module_namesr   r0  r8  r;  other_module_suffixestarget_modules_suffix_maprequired_suffixesrU  s
       `     rT   rI  rI    sP   F .#&&nMNN(N	^MNN/0$$%788v 	 oF
 1Ci0BQbcgQhvVQhV0Bi LZ Z>4'8'>!>> Z  !!:!@!@!BHZ[hF*f.M[IZ[[[!%%f-  \ >""/ j ![s   ;D4D:c                      \ rS rSrSrS rSrg)rL  if  zh
A private helper method used to represent excluded modules in the check_target_module_exists function.
c                    gr  r   r   s    rT   __bool___ExcludedModule.__bool__k  s    rn   r   N)r   r  r  r  r  rB  r  r   rn   rT   rL  rL  f  s    rn   rL  c                  ^ [        U S5      (       a  U R                  (       a  [        U R                  [        5      (       a1  [        R
                  " U R                  T5      (       a
  [        5       $ OHTU R                  ;   a
  [        5       $ [        U4S jU R                   5       5      (       a
  [        5       $ [        U SS5      nU(       a$  [        U4S jU 5       5      (       a
  [        5       $ U R                  c  U R                  b  g[        U R                  [        5      (       a  [        U R                  T5      nU$ TU R                  ;   a  SnU$ [        U4S jU R                   5       5      n[        U S	S5      n[        U S
S5      nUSL=(       a%    [        U[        5      (       a  [        U5      S:g  OSnU(       a  U(       a  SnUb  [        U5      S:X  a  [        R                  " ST5      nOC[        U[        5      (       a  U/OUnU H#  n[        R                  " SU S3T5      nUc  M#    O   Uc  SnU$ [        UR!                  S5      5      n[        U[        5      (       a  Xt:H  nU$ Xt;   nU$ )a  A helper method to check if the passed module's key name matches any of the target modules in the adapter_config.

Args:
    config (`PeftConfig`):
        A config to match target modules from.
    key (`str`):
        A key to search any matches in config

Returns:
    `bool` | `re.Match[str]` | `None`:
        True or re.Match object if key matches any target modules from config, False or None if no match found.
r@  c              3  L   >#    U  H  nTR                  S U 35      v   M     g7fr9  rv  )r   exclude_keyr   s     rT   r  -check_target_module_exists.<locals>.<genexpr>  s&     [DZ[+/00DZrz  r  Nc              3  Z   >#    U  H   n[         R                  " S U S3T5      v   M"     g7f)z(^|.*\.)z($|\..*)N)rematch)r   r   r   s     rT   r  rG    s(     O!rxx8A3h/55s   (+FTc              3  L   >#    U  H  nTR                  S U 35      v   M     g7fr9  rv  rw  s     rT   r  rG    s&     !iShZ#,,:,/?"@"@Shrz  r>  r?  r   z.*\.[^.]*\.(\d+)\.z.*\.z	\.(\d+)\.r(   )r3   r@  r4   r   rI  	fullmatchrL  r  re   r   r7  r    r>   r  rJ  r  group)	rb   r   r  target_module_foundlayer_indexesr?  is_using_layer_indexeslayer_indexpatterns	    `       rT   r   r   o  sX    v())f.D.Df,,c22||F22C88&(( 9F***"$$[FDZDZ[[["$$ f&7>OOOOO"$$%F,D,D,P&''--6v7L7LcRF E 
%%	%"@ = "!iSYShSh!ii(=tD )94@!.d!: "
'1-'F'FC!#D 	 "&9K %^)<)A hh'<cB5?PS5T5T.!1Zh-G"$((d7)9+Es"KK".  .
 "&+#  "+"3"3A"67mS11*5*F'  +6*F'rn   c                (   U R                   U   nU R                  R                  5        VVs/ s H  u  p4UPM	     nnn/ / S.nU HC  nU R                  X#5      (       a  US   R	                  U5        M/  US   R	                  U5        ME     U$ s  snnf )zo
A helper function to inspect the set of matched and unmatched modules for a PEFT model and the given adapter.
)matched	unmatchedrT  rU  )rg   rh   r2   r   r7   )tunerr   rb   r   r$  r%  r  s          rT   inspect_matched_modulesrW    s     |,F"'++";";"=>"="=H> r2K,,V99	"))#.$++C0	 
  ?s   Bc                  ^ [        U S5      (       d  U $ [        U R                  [        5      (       a"  U R                  R	                  5       [
        :X  d  U $ [        R                  R                  [        4nSn[        5       nUR                  5        Hn  u  nm[        TU5      (       a  UR                  U5        M*  [        T[        5      (       d  MA  [        U4S jU 5       5      (       d  M]  UR                  U5        Mp     [        5       n[        U[        5      (       a  UR!                  5       nUb@  UR                  5        VVs/ s H  u  pXXL d  M  UPM     snnS   n	UR                  U	5        O{U R"                  [$        R&                  :X  a]  [(         HS  n[+        XS5      n
U
c  M  UR                  5        VVs/ s H  u  pXXL d  M  UPM     snnS   n	UR                  U	5          O   UR                  5        HU  u  nm[        T[        5      (       d  M  TR                  5        H$  u  pU(       d  M  UR                  U SU 35        M&     MW     XF-  nX@l        U $ s  snnf s  snnf )z
Helper function to update `target_modules` to all linear/Conv1D layers if provided as 'all-linear'. Adapted from
the QLoRA repository: https://github.com/artidoro/qlora/blob/main/qlora.py
r   )r   c              3  R   >#    U  H  o[        T5      R                  ;   v   M     g 7fr   )r   r   )r   r   rK   s     rT   r  3_maybe_include_all_linear_layers.<locals>.<genexpr>  s!     7i\hWXT&\=R=R8R\hs   $'Nr   r:  )r3   r4   r   r   lowerr   r8   r   r   r   r   r2   r;  r   r  r   get_output_embeddings	task_typer#   SEQ_CLSr   re   )rg   rh   linear_classeslinear_nameslinear_module_namesrJ   module_names_to_exclude
output_embrK   last_module_namecls_headr   r;  r  s           `     rT   rF  rF    s!   
 ; 011 	;--s33&&,,.2QQhhoov.NL%++-ffn--##D)//C7i\h7i4i4i  ##D) . "e%))002
!9>9L9L9Ng9NRXRf9Nghij#''(89""h&6&66 +"55'AFATATAV'mAVZ`ZlAV'mno'p$+//0@A +  --/fn--!'!5!5!76+//6(!F80DE "8 0 2!4-  h (ns   
I(I(
I.I.c                   Uc  U R                   n[        U[        5      (       a  [        SU< S35      eU R                  (       a  [        U R                  5      nU Vs/ s H  o3U;  d  M
  UPM     nnU(       aG  [        R                  " SSR                  U R                  5       SSR                  U5       S35        U$ [        R                  " S5        U$ s  snf )z
Helper function to check which adapters should be merged.

Only return those adapters that are not already merged. Give a warning if some or all of the adapters are already
merged.

z/adapter_names should be a list of strings, got r:  z'Already following adapters were merged ,z#. You are now additionally merging z/All adapters are already merged, nothing to do.)
r   r4   r   rf   r  r   r  r   r   rD   )rK   r   r  rJ   s       rT   check_adapters_to_mergerh    s     ..-%%J=J[[\]^^}}f445*7W-$;V-WMM9#((6CYCY:Z9[ \447HH]4K3LAO  MMKL Xs   	C,CFc                    [         R                  " U 5      nSS jnU(       a0  U R                  5        H  u  pEU" XRR                  U5      5        M     U$ )zClone a module in a pytorch model.

Clones a module of a model, optionally sharing all the parameters between the original and the clone. Simplifies
reusing a module when manipulating the architecture of a model.
c                V    U R                  SS9 H  u  p#UR                  X#5        M     g )NFrs  )r   register_parameter)srcdstrJ   rS   s       rT   _share_weights$clone_module.<locals>._share_weights)  s*    ///>KD""4/ ?rn   )rl  r  rm  r  )copydeepcopyr2   rl  )rK   share_weightsclonern  rJ   	submodules         rT   clone_moduleru  !  sK     MM&!E0 %335OD9&9&9$&?@  6 Lrn   c           	        [        U S5      (       a  U R                  n [        U S5      (       a  M  [        U S5      (       a  U R                  n SnSn[        U S5      (       a  SnU R                  nOd[        U S5      (       a4  [        U R                  S5      (       a  SnU R                  R
                  nO[        U S5      (       a  S	nU R                  nU(       a  [        U[        R                  5      (       d  [        S
5      e/ nU Hp  u  pV[        XV5       H\  n[        U5      nUR                  [        X7   SS95        US   R                  5        H  n	[        U	S5      (       d  M  Xl        M     M^     Mr     [        R                  " U5      nUS:X  a  X0l        O/US:X  a  X0R                  l        OUS	:X  a  X0l        O[        S5      e[        U R"                  S5      (       a  [        U5      U R"                  l        gg)an  Replicate layers in a transfomer model with weight sharing.

This function looks for a module list attribute at model[(.model)*].layers and replicates the layers in the module
list according to the layer map. For example the map `[[0, 4], [2, 5]]` will take the set of layers `[0, 1, 2, 3,
4]` and replace them with a module list containing `[0, 1, 2, 3, 2, 3, 4]`.
rh   bertNlayersllamaencoderrH   hfalconzlCould not locate the layers attribute in the model. Expected Llama, Bert or Falcon compatible architectures.T)rr  r=  	layer_idxz@Unexpected model type, need to handle post-processing of layers.num_hidden_layers)r3   rh   rw  rx  rz  rH   r{  r4   r   
ModuleListrf   r-  r  r7   ru  r   r}  rb   r~  )
rh   	layer_maprc   rx  
new_layersstartendrP   current_idxrt  s
             rT   replicate_layersr  4  s    %
!
! %
!
! uf

J Fuh
			"	"wu}}g'F'F
$$			
Z>>G
 	

 J
u"Aj/Kl69DIJ'^335	9k22*5' 6	 #   ]]:&FW	v	$	x	[\\u||011),Z& 2rn   c                    [        XUS9  U R                  5        H[  n[        XC5      (       d  M  UR                  (       a&  [        R
                  " S5        UR                  5         UR                  XS9  M]     g)ae  Set the active PEFT adapter(s) of the model.

Active adapters are those adapters that participate in the forward pass. Use this function if you want to switch
between multiple PEFT adapters.

Args:
    model (`nn.Module`):
        The model on which the adapter(s) should be set.
    adapter_name (str, list[str]):
        The name(s) of the adapter(s) to set as active
    inference_mode (bool, optional):
         Whether the activated adapter should be frozen (i.e. `requires_grad=False`). Default is False.
    layer_cls (type, optional):
        The class of the adapter layer. Defaults to `BaseTunerLayer`.
rA  zJAdapter cannot be set when the model is merged. Unmerging the model first.N)r   r   r4   r  r   r   r  rP  )rh   r   rB  r   rK   s        rT   rP  rP  m  sX    * ^D--/f((}}jk |K "rn   c                ~    U R                  5        H)  n[        U[        5      (       d  M  UR                  XS9  M+     g )N)new_active_adapters)r   r4   r   r   )rh   r   r  rK   s       rT   _delete_auxiliary_adapterr    s1    --/f677!!,!X "rn   c                   U R                  5        VVs/ s H  u  pEX$;  d  M  UPM     nnnSnU HH  n[        X5      u  pXn[        X5      (       d  M#  UR                  U5        Ub  M9  UR                  SS nMJ     [        XUS9  U$ s  snnf )as  
Delete an existing PEFT adapter.

Note: This function does not delete the PEFT config on the model, if there is one. It will also not completely
purge the PEFT layers if the last PEFT adapter is deleted. For this, consider using `model.unload()` if using a
PEFT model instance, or just reloading the base model.

Args:
    model (`nn.Module`):
        The model from which the adapter should be deleted.
    adapter_name (str):
        The name of the adapter to be deleted.
    prefix (str):
        The prefix of the PEFT method, e.g. "lora_" for LoRA.
    layer_cls (type, optional):
        The class of the adapter layer. Defaults to `BaseTunerLayer`.

Returns:
    new_adapter (list[str] | None):
        The name of remaining adapter(s) after deletion, or `None` if there are no active adapters left. Use this
        to set the new active adapter of the model if necessary.
N)r   r  )r2   r'   r4   r   r   r  )	rh   r   r   r   r   r$  r%  r   r   s	            rT   r   r     s    2 #("5"5"7M"76;L"7HMK&u21f((!!,/"$44Q7  eT_` Ns
   BBc                   U(       d  g[         R                  [         R                  1nU R                  5        GH  n[	        U[
        5      (       d  M  UR                  5        GHZ  n[	        U[        R                  [        R                  [        45      (       d  M:  X;  a  MA  [	        XQ   [        R                  5      (       aF  XQ   R                  U;   a2  XQ   R                  R                  [         R                  5      XQ   l        M  [	        XQ   [         R                  5      (       a8  XQ   R                  U;   a#  XQ   R                  [         R                  5      XQ'   GM  XQ   R!                  5        HC  nUR                  U;   d  M  UR                  R                  [         R                  5      Ul        ME     GM]     GM     g)a1  
A helper method to cast the adapter weights to the correct dtype.

Currently, this only upcasts float16 and bfloat16 to float32.

Args:
    adapter_name (`str`):
        The adapter name.
    autocast_adapter_dtype (`bool`, *optional*):
        Whether to autocast the adapter dtype. Defaults to `True`.
N)r8   float16bfloat16r   r4   r   r   r  r  r)   	Parameterr  datarG   float32r  r  )rh   r   r  dtypes_to_convert_to_fp32rK   rt  rS   s          rT   r  r    sE    "!& ?--/&.11)Ii"--9I9I:)VWW,)12<<@@*004MM3<3J3O3O3R3RSXS`S`3aI+0)15<<@@*004MM.7.E.H.H.WI+"0;;=;;";;!&u}}!=EJ ># *	 "rn   c                    U R                  5        H/  n[        U[        [        45      (       d  M   UR	                  XS9  M1     g)aw  
Enable or disable gradients on the given adapter(s).

Args:
    model (`nn.Module`):
        The model from which the adapter should be deleted.
    adapter_name (`str` or `Sequence[str]`):
        The name of the adapter(s) whose gradients should be enabled/disabled.
    requires_grad (`bool`, *optional*)
        Whether to enable (`True`, default) or disable (`False`).
r   N)r   r4   r   r   r   )rh   r   r   rK   s       rT   r   r     s8     --/f~/GHII$$=$^ "rn   r  )rK   r  r  z#tuple[int, int] | tuple[None, None])r   list[str] | set[str]r<  r  r  r)  )r   r   r  r  )r  )rV  r   r   r   r  r?   )rg   r&   rh   r  r  r&   r   )rK   r   r   r  r  r  r  )rK   r  )rh   r  r  zlist[tuple[int, int]])r   r  rB  r  r   r   r  r  )r   r   r  r  r  r  )
rh   r  r   r   r   r   r   r   r  zlist[str] | Noner  )rh   r  r   r   r  r  r  r  r  )Y
__future__r   rp  r  rA   rI  r  r   abcr   r   collections.abcr   
contextlibr   r   typingr	   r
   r   r   r8   accelerate.hooksr   accelerate.utilsr   r   	packagingr   r   r   transformersr   transformers.pytorch_utilsr   peft.mappingr   
peft.utilsr   peft.utils.constantsr   r   r   r   r   peft.utils.integrationsr   peft.utils.otherr   r   r   r    r!   peft.utils.peft_typesr"   r#   peft.utils.warningr$   rb   r&   utilsr'   _buffer_dictr)   parse__version___torch_supports_dtensorr   is_availabler   rU   rm   r   Moduler   r   rI  rL  r   rW  rF  rh  ru  r  rP  r  r   r  r   r   rn   rT   <module>r     s   #   	 	   # $ 2 1 1  - E    ( - 4 6  7  5 *  # $ "--(9(9:gmmG>TT 5Z%:K:K:X:X:Z  @S @SF,:%zC-		3 C-L s!S s!l	O(O>ROOd EP<~8&19x !&4	L!L L $	L
 
L<Y Yg$$$'$14$AU$$N(>V_rn   