
"""
This module is intended to store mask functions for use inside SHiRA construction. The mask functions are required to
have a specific signature as shown below.

Required positional arguments:
    base_layer - This is the linear layer where the shira adapter will be attached.
    r - This parameter is used to determine the number of parameters in the shira adapter in a way that is
        consistent with LoRA sizing. SHiRA is a high rank adapter. Setting this parameter does not restrict the
        adapter rank.
Keyword arguments can be provided as needed by the particular mask function implementation.

Return:
    mask - this is a torch.tensor of the same shape as base_layer.weight that contains 0s and 1s with the same
           dtype and device as base_layer.weight
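
For illustration, a minimal mask function satisfying this contract might look like the following (a sketch only,
not part of the library; it simply selects the first r * (out_features + in_features) weights):
```
    def first_entries_mask(base_layer, r):
        weight = base_layer.weight
        num_shira_weights = r * (weight.shape[0] + weight.shape[1])
        # zeros_like preserves the dtype and device of base_layer.weight
        mask = torch.zeros_like(weight).view(-1)
        mask[:num_shira_weights] = 1.0
        return mask.view(weight.shape)
```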

If you would like to attach SHiRA adapters to a model using PEFT methods (such as get_peft_model()) with a mask
function that needs more arguments than the required positional ones, you can create the mask function reference
like the following:

```
    def create_mask_function_reference(**my_kwargs):
        def mask_fn(base_layer, r):
            ... your implementation here that might use my_kwargs ...
            return mask
        return mask_fn
```
Then, you can create your peft model with custom SHiRA mask as follows:
```
    model = ...
    my_kwargs = ...
    mask_fn = create_mask_function_reference(**my_kwargs)
    peft_config = ShiraConfig(r=4, mask_type='my_custom_mask')
    peft_config.mask_fn = mask_fn
    peft_model = get_peft_model(model, peft_config)
```
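
As a concrete (purely illustrative) instance of this pattern, the closure below builds a hypothetical
magnitude-based mask; `use_abs` is an invented keyword argument, not a PEFT API:
```
    def create_mask_function_reference(use_abs=True):
        def mask_fn(base_layer, r):
            weight = base_layer.weight
            num_shira_weights = r * (weight.shape[0] + weight.shape[1])
            scores = weight.abs() if use_abs else weight
            # keep the num_shira_weights largest-scoring weights
            idx = torch.topk(scores.view(-1), num_shira_weights).indices
            mask = torch.zeros_like(weight).view(-1)
            mask[idx] = 1.0
            return mask.view(weight.shape)
        return mask_fn
```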

Complete training examples are provided in the examples/shira/ directory.
"""

from typing import Optional

import torch
import torch.nn as nn


def random_mask(base_layer: nn.Module, r: int, random_seed: Optional[int] = None, **kwargs) -> torch.Tensor:
    # Budget the number of nonzero mask entries to match a rank-r LoRA adapter:
    # r * (out_features + in_features) parameters.
    shape = base_layer.weight.shape
    num_shira_weights = r * (shape[0] + shape[1])
    # Optionally seed the generator so the mask is reproducible across runs.
    random_generator = torch.Generator()
    if random_seed is not None:
        random_generator.manual_seed(random_seed)
    # Pick num_shira_weights random flat indices into the weight matrix.
    idx = torch.randperm(base_layer.weight.numel(), generator=random_generator)[:num_shira_weights].to(
        base_layer.weight.device
    )
    val = torch.ones_like(idx.type(base_layer.weight.dtype))
    # Scatter 1s at the selected indices into a zero mask, then restore the weight shape.
    mask = torch.zeros_like(base_layer.weight.view(1, -1))
    mask = mask.scatter_(1, idx.unsqueeze(0), val.unsqueeze(0)).view(shape)
    return mask
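

# Minimal sanity check (illustrative addition, not part of the PEFT API): build a random
# mask for a small Linear layer and verify the LoRA-equivalent sparsity budget.
if __name__ == "__main__":
    layer = nn.Linear(in_features=8, out_features=4)
    mask = random_mask(layer, r=2, random_seed=0)
    assert mask.shape == layer.weight.shape
    assert int(mask.sum().item()) == 2 * (4 + 8)  # r * (out_features + in_features)
    print(f"random_mask selected {int(mask.sum().item())} of {mask.numel()} weights")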