from __future__ import annotations

import warnings
from typing import Optional

from transformers import PreTrainedModel

from .auto import MODEL_TYPE_TO_PEFT_MODEL_MAPPING
from .config import PeftConfig
from .mapping import PEFT_TYPE_TO_CONFIG_MAPPING, PEFT_TYPE_TO_PREFIX_MAPPING
from .mixed_model import PeftMixedModel
from .peft_model import PeftModel
from .tuners.tuners_utils import BaseTuner, BaseTunerLayer
from .utils import _prepare_prompt_learning_config


def get_peft_model(
    model: PreTrainedModel,
    peft_config: PeftConfig,
    adapter_name: str = "default",
    mixed: bool = False,
    autocast_adapter_dtype: bool = True,
    revision: Optional[str] = None,
    low_cpu_mem_usage: bool = False,
) -> PeftModel | PeftMixedModel:
    """
    Returns a Peft model object from a model and a config, where the model will be modified in-place.

    Args:
        model ([`transformers.PreTrainedModel`]):
            Model to be wrapped.
        peft_config ([`PeftConfig`]):
            Configuration object containing the parameters of the Peft model.
        adapter_name (`str`, `optional`, defaults to `"default"`):
            The name of the adapter to be injected; if not provided, the default adapter name ("default") is used.
        mixed (`bool`, `optional`, defaults to `False`):
            Whether to allow mixing different (compatible) adapter types.
        autocast_adapter_dtype (`bool`, *optional*):
            Whether to autocast the adapter dtype. Defaults to `True`. Right now, this will only cast adapter weights
            using float16 or bfloat16 to float32, as this is typically required for stable training; it only affects
            select PEFT tuners.
        revision (`str`, `optional`, defaults to `main`):
            The revision of the base model. If this isn't set, the saved Peft model will load the `main` revision for
            the base model.
        low_cpu_mem_usage (`bool`, `optional`, defaults to `False`):
            Create empty adapter weights on meta device. Useful to speed up the loading process. Leave this setting as
            False if you intend on training the model, unless the adapter weights will be replaced by different weights
            before training starts.
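
    Example (a minimal usage sketch; the "gpt2" checkpoint and the LoRA hyperparameters below are
    illustrative placeholders, not prescriptive choices):

        ```py
        >>> from transformers import AutoModelForCausalLM
        >>> from peft import LoraConfig, get_peft_model

        >>> # a small causal LM, used here only for illustration
        >>> model = AutoModelForCausalLM.from_pretrained("gpt2")
        >>> peft_config = LoraConfig(task_type="CAUSAL_LM", r=8, lora_alpha=16, lora_dropout=0.1)
        >>> model = get_peft_model(model, peft_config)
        >>> model.print_trainable_parameters()
        ```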
    """
    model_config = BaseTuner.get_model_config(model)
    old_name = peft_config.base_model_name_or_path
    new_name = model.__dict__.get("name_or_path", None)
    peft_config.base_model_name_or_path = new_name

    # Warn if the model already contains tuner layers, i.e. it was already wrapped with PEFT.
    if any(isinstance(module, BaseTunerLayer) for module in model.modules()):
        warnings.warn(
            "You are trying to modify a model with PEFT for a second time. If you want to reload the model with a "
            "different config, make sure to call `.unload()` before."
        )

    if (old_name is not None) and (old_name != new_name):
        warnings.warn(
            f"The PEFT config's `base_model_name_or_path` was renamed from '{old_name}' to '{new_name}'. "
            "Please ensure that the correct base model is loaded when loading this checkpoint."
        )

    if revision is not None:
        if peft_config.revision is not None and peft_config.revision != revision:
            warnings.warn(
                f"peft config has already set base model revision to {peft_config.revision}, "
                f"overwriting with revision {revision}"
            )
        peft_config.revision = revision

    if (
        isinstance(peft_config, PEFT_TYPE_TO_CONFIG_MAPPING["LORA"])
        and (peft_config.init_lora_weights == "eva")
        and not low_cpu_mem_usage
    ):
        warnings.warn(
            "lora with eva initialization used with low_cpu_mem_usage=False. "
            "Setting low_cpu_mem_usage=True can improve the maximum batch size possible for eva initialization."
        )

    # The adapter name must not appear in the tuner's parameter prefix, otherwise loading a state
    # dict can reinitialize the adapter weights.
    prefix = PEFT_TYPE_TO_PREFIX_MAPPING.get(peft_config.peft_type)
    if prefix and adapter_name in prefix:
        warnings.warn(
            f"Adapter name {adapter_name} should not be contained in the prefix {prefix}. "
            "This may lead to reinitialization of the adapter weights during loading."
        )

    if mixed:
        # note: PeftMixedModel does not support autocast_adapter_dtype, so don't pass it
        return PeftMixedModel(model, peft_config, adapter_name=adapter_name)

    if peft_config.task_type not in MODEL_TYPE_TO_PEFT_MODEL_MAPPING.keys() and not peft_config.is_prompt_learning:
        return PeftModel(
            model,
            peft_config,
            adapter_name=adapter_name,
            autocast_adapter_dtype=autocast_adapter_dtype,
            low_cpu_mem_usage=low_cpu_mem_usage,
        )

    if peft_config.is_prompt_learning:
        peft_config = _prepare_prompt_learning_config(peft_config, model_config)
    return MODEL_TYPE_TO_PEFT_MODEL_MAPPING[peft_config.task_type](
        model,
        peft_config,
        adapter_name=adapter_name,
        autocast_adapter_dtype=autocast_adapter_dtype,
        low_cpu_mem_usage=low_cpu_mem_usage,
    )