
from __future__ import annotations

import warnings
from dataclasses import dataclass
from typing import Optional

from peft.config import PeftConfig
from peft.utils.peft_types import PeftType


@dataclass
class XLoraConfig(PeftConfig):
    r"""
    This is the configuration class to store the configuration of an `XLoraModel`. When the config is reloaded, the
    paths in the `adapters` field are disregarded in favor of the saved adapters. As such, only the keys matter during
    loading.

    Args:
        hidden_size (`int`):
            Hidden size of the base model.
        adapters (`dict`):
            Mapping of adapter names to the LoRA adapter IDs, as per `PeftModel.load_adapter`. *They will be
            automatically loaded*, to be used as LoRA experts. When using `from_pretrained`, pass the new adapters
            dict as a keyword argument.
        enable_softmax (`bool`, *optional*, defaults to `True`):
            Enable softmax application for the X-LoRA classifier.
        enable_softmax_topk (`bool`, *optional*, defaults to `False`):
            Enable softmax application for the top-k LoRA adapters. Mutually exclusive with `enable_softmax` and must
            only be set if `top_k_lora` is set.
        softmax_temperature (`float`, *optional*, defaults to 1.0):
            Softmax temperature; lower values yield sharper predictions.
        layerwise_scalings (`bool`, *optional*, defaults to `False`):
            If True, generate scalings for each LoRA adapter (each layer). If False, the same scalings are broadcast
            to every layer.
        top_k_lora (`int`, *optional*, defaults to None):
            Sparsely select the top_k LoRA experts instead of the default dense method.
        xlora_depth (`int`, *optional*, defaults to 1):
            Depth of the X-LoRA classifier.
        xlora_size (`int`, *optional*, defaults to 2048):
            Hidden size of the X-LoRA classifier, irrelevant if `xlora_depth=1`.
        xlora_dropout_p (`float`, *optional*, defaults to 0.2):
            Dropout probability of the X-LoRA classifier, irrelevant if `xlora_depth=1`.
        use_trainable_adapters (`bool`, *optional*, defaults to False):
            Make the adapters trainable.
        scaling_pass_value (`float`, *optional*, defaults to 0):
            Value used for the adapter scalings during the initial scaling pass.
        global_scaling_weight (`float`, *optional*, defaults to 1):
            Weight to multiply the output of each LoRA adapter by.
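
    Example:
        A minimal construction sketch. The `hidden_size` value, adapter names, checkpoint paths, and `base_model`
        below are illustrative placeholders, not values shipped with this library; match them to your own base model
        and saved LoRA adapters.

        ```py
        >>> from peft import XLoraConfig, get_peft_model

        >>> config = XLoraConfig(
        ...     task_type="CAUSAL_LM",
        ...     hidden_size=4096,  # placeholder: set to the base model's hidden size
        ...     adapters={
        ...         "adapter_0": "./path/to/lora_adapter_0",  # placeholder paths
        ...         "adapter_1": "./path/to/lora_adapter_1",
        ...     },
        ... )
        >>> # `base_model` is assumed to be an already-loaded transformers model
        >>> model = get_peft_model(base_model, config)
        ```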
    Ninthidden_sizezdict[str, str]adaptersTboolenable_softmaxFenable_softmax_topklayerwise_scalings   xlora_depthi   
xlora_sizeg?floatxlora_dropout_puse_trainable_adaptersg      ?softmax_temperaturezOptional[int]
top_k_lorag        scaling_pass_valueglobal_scaling_weightc                   t         |           t        j                  | _        | j
                  t        j                  d       d| _        | j                  t        j                  d       i | _        | j                  r!| j                  t        j                  d       | j                  r!| j                  rt        j                  d       | j                  &| j                  dk  rt        j                  d       y y y )NzqNo value was provided for `hidden_size`. This will be set to 4096 by default, please ensure that this is correct.i   zhNo value was provided for for `adapters`. This will be set to empty, please ensure that this is correct.z5`enable_softmax_topk` enabled `top_k_lora` is not setzc`enable_softmax_topk` and `enable_softmax` are both enabled. This will result in worse performance.r   z&`top_k_lora` value must be at least 1.)super__post_init__r   XLORA	peft_typer   warningswarnr   r   r   r   )self	__class__s    S/home/cdr/jupyterlab/.venv/lib/python3.12/site-packages/peft/tuners/xlora/config.pyr   zXLoraConfig.__post_init__N   s    !#MM D  $D== MMz DM##(?MMQR##(;(;MMu ??&4??Q+>MMBC ,?&    )__name__
__module____qualname____doc__r   __annotations__r   r   r   r   r   r   r   r   r   r   r   r   r   __classcell__)r#   s   @r$   r	   r	      s    $L K#Hn#ND %%$$KJ OU #(D(!$$ $J$ ###&5&D Dr%   r	   )
__future__r   r    dataclassesr   typingr   peft.configr   peft.utils.peft_typesr   r	    r%   r$   <module>r2      s:    #  !  " * MD* MD MDr%   