
from __future__ import annotations

from dataclasses import dataclass, field
from typing import Literal, Optional, Union

from peft.config import PeftConfig
from peft.utils import PeftType


@dataclass
class OFTConfig(PeftConfig):
    r"""
    This is the configuration class to store the configuration of an [`OFTModel`].

    Args:
        r (`int`): OFT rank, number of OFT blocks per injected layer.
        oft_block_size (`int`): OFT block size across different layers.
        module_dropout (`float`):
            The multiplicative dropout probability, applied by randomly setting OFT blocks to the identity during
            training, similar to the dropout layer in LoRA.
        target_modules (`Optional[Union[list[str], str]]`):
            The names of the modules to apply the adapter to. If this is specified, only the modules with the specified
            names will be replaced. When passing a string, a regex match will be performed. When passing a list of
            strings, either an exact match will be performed or it is checked if the name of the module ends with any
            of the passed strings. If this is specified as 'all-linear', then all linear modules are chosen, excluding
            the output layer. If this is not specified, modules will be chosen according to the model architecture. If
            the architecture is not known, an error will be raised -- in this case, you should specify the target
            modules manually.
        fan_in_fan_out (`bool`): Set this to True if the layer to replace stores weight like (fan_in, fan_out).
        bias (`str`): Bias type for OFT. Can be 'none', 'all' or 'oft_only'. If 'all' or 'oft_only', the
            corresponding biases will be updated during training. Be aware that this means that, even when disabling
            the adapters, the model will not produce the same output as the base model would have without adaptation.
        exclude_modules (`Optional[Union[List[str], str]]`):
            The names of the modules to not apply the adapter to. When passing a string, a regex match will be performed.
            When passing a list of strings, either an exact match will be performed or it is checked if the name of the
            module ends with any of the passed strings.
        init_weights (`bool`):
            Whether to perform initialization of OFT weights.
        layers_to_transform (`Union[List[int], int]`):
            The layer indices to transform. If a list of ints is passed, it will apply the adapter to the layer indices
            that are specified in this list. If a single integer is passed, it will apply the transformations on the
            layer at this index.
        layers_pattern (`Optional[Union[List[str], str]]`):
            The layer pattern name, used only if `layers_to_transform` is different from `None`. This should target the
            `nn.ModuleList` of the model, which is often called `'layers'` or `'h'`.
        modules_to_save (`List[str]`):
            List of modules apart from adapter layers to be set as trainable and saved in the final checkpoint.
        coft (`bool`):
            Whether to use the constrained variant of OFT or not, off by default.
        eps (`float`):
            The control strength of COFT. The freedom of rotation. Only has an effect if `coft` is set to True.
        block_share (`bool`):
            Whether to share the OFT parameters between blocks or not. This is `False` by default.
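        use_cayley_neumann (`bool`):
            Whether to use the Cayley-Neumann formulation of OFT. Setting this to True improves computational
            efficiency, but comes at the cost of a bigger approximation error for orthogonality.
        num_cayley_neumann_terms (`int`):
            Number of Cayley-Neumann terms to use. A higher number results in less approximation error for
            orthogonality.

    Example:
        A minimal usage sketch. The base model checkpoint and the `target_modules` names below are illustrative
        assumptions, not values required by OFT:

        ```py
        >>> from transformers import AutoModelForCausalLM
        >>> from peft import OFTConfig, get_peft_model

        >>> config = OFTConfig(
        ...     r=8,  # number of OFT blocks per injected layer
        ...     oft_block_size=0,  # exactly one of `r` and `oft_block_size` may be non-zero
        ...     target_modules=["q_proj", "v_proj"],  # assumed attention projection names of the base model
        ...     module_dropout=0.0,
        ... )
        >>> base_model = AutoModelForCausalLM.from_pretrained("facebook/opt-125m")
        >>> model = get_peft_model(base_model, config)
        >>> model.print_trainable_parameters()
        ```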
    """

    r: int = field(default=8, metadata={"help": "OFT rank, number of OFT blocks per injected layer."})
    oft_block_size: int = field(
        default=0,
        metadata={
            "help": "OFT block size across different layers.",
            "note": (
                "You can only specify either r or oft_block_size, but not both simultaneously, because "
                "r x oft_block_size = layer dimension."
            ),
        },
    )
    module_dropout: float = field(
        default=0.0,
        metadata={
            "help": (
                "OFT multiplicative dropout, randomly setting blocks of OFT to be identity matrix, similar to the "
                "dropout layer in LoRA."
            )
        },
    )
    target_modules: Optional[Union[list[str], str]] = field(
        default=None,
        metadata={
            "help": (
                "List of module names or regex expression of the module names to replace with OFT. "
                "For example, ['q', 'v'] or '.*decoder.*(SelfAttention|EncDecAttention).*(q|v)$'. "
                "This can also be a wildcard 'all-linear' which matches all linear/Conv1D layers except the output layer."
            )
        },
    )
    fan_in_fan_out: bool = field(
        default=False,
        metadata={"help": "Set this to True if the layer to replace stores weight like (fan_in, fan_out)."},
    )
    bias: Literal["none", "all", "oft_only"] = field(
        default="none", metadata={"help": "Bias type for OFT. Can be 'none', 'all' or 'oft_only'."}
    )
    exclude_modules: Optional[Union[list[str], str]] = field(
        default=None,
        metadata={"help": "List of module names or regex expression of the module names to exclude from OFT."},
    )
    init_weights: bool = field(
        default=True,
        metadata={
            "help": (
                "Whether to initialize the weights of the OFT layers with their default initialization. Don't change "
                "this setting, except if you know exactly what you're doing."
            )
        },
    )
    layers_to_transform: Optional[Union[list[int], int]] = field(
        default=None,
        metadata={
            "help": (
                "The layer indexes to transform. If this argument is specified, PEFT will transform only the layer "
                "indexes that are specified inside this list. If a single integer is passed, PEFT will transform only "
                "the layer at this index."
            )
        },
    )
    layers_pattern: Optional[Union[list[str], str]] = field(
        default=None,
        metadata={
            "help": (
                "The layer pattern name, used only if `layers_to_transform` is different to None and if the layer "
                "pattern is not in the common layers pattern. This should target the `nn.ModuleList` of the model, "
                "which is often called `'layers'` or `'h'`."
            )
        },
    )
    modules_to_save: Optional[list[str]] = field(
        default=None,
        metadata={
            "help": (
                "List of modules apart from OFT layers to be set as trainable and saved in the final checkpoint. For "
                "example, in Sequence Classification or Token Classification tasks, the final layer "
                "`classifier/score` are randomly initialized and as such need to be trainable and saved."
            )
        },
    )
    coft: bool = field(default=False, metadata={"help": "Whether to use the constrained variant of OFT or not."})
    eps: float = field(
        default=6e-05,
        metadata={
            "help": "The control strength of COFT. The freedom of rotation. Only has an effect if `coft` is set to True."
        },
    )
    block_share: bool = field(
        default=False, metadata={"help": "Whether to share the OFT parameters between blocks or not."}
    )
    use_cayley_neumann: bool = field(
        default=False,
        metadata={
            "help": (
                "Whether to use the Cayley-Neumann formulation of OFT or not. Set to True to improve computational "
                "efficiency, but this comes at the cost of a bigger approximation error for orthogonality."
            )
        },
    )
    num_cayley_neumann_terms: int = field(
        default=5,
        metadata={
            "help": (
                "Number of Cayley-Neumann terms to use. A higher number results in less approximation error for "
                "orthogonality."
            )
        },
    )

    def __post_init__(self):
        super().__post_init__()
        self.peft_type = PeftType.OFT
        # Normalize list-valued module specs to sets; regex strings are kept as-is.
        self.target_modules = (
            set(self.target_modules) if isinstance(self.target_modules, list) else self.target_modules
        )
        self.exclude_modules = (
            set(self.exclude_modules) if isinstance(self.exclude_modules, list) else self.exclude_modules
        )
        if self.layers_pattern and not self.layers_to_transform:
            raise ValueError("When `layers_pattern` is specified, `layers_to_transform` must also be specified.")
        # Exactly one of `r` and `oft_block_size` must be non-zero, because r x oft_block_size == in_features.
        if self.r == 0 and self.oft_block_size == 0:
            raise ValueError(
                f"Either `r` or `oft_block_size` must be non-zero. "
                f"Currently, r = {self.r} and oft_block_size = {self.oft_block_size}."
            )
        if not (self.r != 0) ^ (self.oft_block_size != 0):
            raise ValueError(
                f"You can only specify either r ({self.r}) or oft_block_size ({self.oft_block_size}), but not both "
                f"simultaneously, because r x oft_block_size == in_features."
            )

    @classmethod
    def check_kwargs(cls, **kwargs):
        r"""
        Check if the kwargs are valid for the configuration.

        Args:
            kwargs (additional keyword arguments, *optional*):
                Additional keyword arguments passed along to the child class initialization.
        """
        # Adapters saved before the PEFT 0.14.0 OFT rewrite do not carry the new `oft_block_size` key and
        # therefore cannot be loaded with the current implementation.
        if "oft_block_size" not in kwargs:
            raise ValueError(
                "OFT has been updated since PEFT 0.14.0. Your trained adapter weights are incompatible with the "
                "latest version of OFT. Please retrain your adapter weights with newer PEFT versions. Alternatively, "
                "downgrade PEFT to version 0.13.0 to use the old adapter weights."
            )
        return super().check_kwargs(**kwargs)