from __future__ import annotations

from typing import TYPE_CHECKING, Any, Optional

import torch

from .utils import PeftType


if TYPE_CHECKING:
    from .config import PeftConfig
    from .tuners.tuners_utils import BaseTuner


# These registries are populated elsewhere in the package when each PEFT method is registered.
PEFT_TYPE_TO_CONFIG_MAPPING: dict[PeftType, type[PeftConfig]] = {}
PEFT_TYPE_TO_TUNER_MAPPING: dict[PeftType, type[BaseTuner]] = {}
PEFT_TYPE_TO_MIXED_MODEL_MAPPING: dict[PeftType, type[BaseTuner]] = {}
PEFT_TYPE_TO_PREFIX_MAPPING: dict[PeftType, str] = {}


def get_peft_config(config_dict: dict[str, Any]) -> PeftConfig:
    """
    Returns a Peft config object from a dictionary.

    Args:
        config_dict (`Dict[str, Any]`): Dictionary containing the configuration parameters.
    """
    return PEFT_TYPE_TO_CONFIG_MAPPING[config_dict["peft_type"]](**config_dict)
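

# --- Editor's illustrative sketch (not part of the original module) ---------------------------
# Shows how `get_peft_config` is typically used: a plain dict, e.g. parsed from an
# `adapter_config.json`, is turned back into a typed config object. The "LORA" entry and the
# LoRA-specific keys below are example values, and the lookup assumes the LoRA method has been
# registered in `PEFT_TYPE_TO_CONFIG_MAPPING` (the main `peft` package does this on import).
def _example_get_peft_config():
    config_dict = {
        "peft_type": "LORA",  # selects the config class from PEFT_TYPE_TO_CONFIG_MAPPING
        "task_type": "CAUSAL_LM",
        "r": 8,
        "lora_alpha": 16,
        "target_modules": ["q_proj", "v_proj"],
    }
    # All keys, including "peft_type", are forwarded to the selected config class constructor.
    return get_peft_config(config_dict)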


def inject_adapter_in_model(
    peft_config: PeftConfig,
    model: torch.nn.Module,
    adapter_name: str = "default",
    low_cpu_mem_usage: bool = False,
    state_dict: Optional[dict[str, torch.Tensor]] = None,
) -> torch.nn.Module:
    """
    A simple API to create and inject an adapter in-place into a model. Currently the API does not support prompt
    learning methods and adaption prompt. Make sure that `target_modules` is set correctly in the `peft_config`
    object. Unlike `get_peft_model`, this API is restricted to non-prompt learning methods.

    Args:
        peft_config (`PeftConfig`):
            Configuration object containing the parameters of the Peft model.
        model (`torch.nn.Module`):
            The input model where the adapter will be injected.
        adapter_name (`str`, *optional*, defaults to `"default"`):
            The name of the adapter to be injected; if not provided, the default adapter name ("default") is used.
        low_cpu_mem_usage (`bool`, *optional*, defaults to `False`):
            Create empty adapter weights on meta device. Useful to speed up the loading process.
        state_dict (`dict`, *optional*, defaults to `None`):
            If a state_dict is passed here, the adapters will be injected based on the entries of the state_dict. This
            can be useful when the exact `target_modules` of the PEFT method is unknown, for instance because the
            checkpoint was created without meta data. Note that the values from the state_dict are not used, only the
            keys are used to determine the correct layers that should be adapted.
    """
    if peft_config.is_prompt_learning or peft_config.is_adaption_prompt:
        raise ValueError("`create_and_replace` does not support prompt learning and adaption prompt yet.")

    if peft_config.peft_type not in PEFT_TYPE_TO_TUNER_MAPPING.keys():
        raise ValueError(
            f"`inject_adapter_in_model` does not support {peft_config.peft_type} yet. Please use `get_peft_model`."
        )

    tuner_cls = PEFT_TYPE_TO_TUNER_MAPPING[peft_config.peft_type]

    # Instantiating the tuner injects the adapter layers into the model's modules in-place.
    peft_model = tuner_cls(
        model, peft_config, adapter_name=adapter_name, low_cpu_mem_usage=low_cpu_mem_usage, state_dict=state_dict
    )

    return peft_model.model