
from typing import Optional, Tuple, Union

import torch
import torch.nn.functional as F
from torch import nn

from ..utils import deprecate, logging
from ..utils.import_utils import is_transformers_available


if is_transformers_available():
    from transformers import CLIPTextModel, CLIPTextModelWithProjection


logger = logging.get_logger(__name__)


def text_encoder_attn_modules(text_encoder):
    attn_modules = []

    if isinstance(text_encoder, (CLIPTextModel, CLIPTextModelWithProjection)):
        for i, layer in enumerate(text_encoder.text_model.encoder.layers):
            name = f"text_model.encoder.layers.{i}.self_attn"
            mod = layer.self_attn
            attn_modules.append((name, mod))
    else:
        raise ValueError(f"do not know how to get attention modules for: {text_encoder.__class__.__name__}")

    return attn_modules


def text_encoder_mlp_modules(text_encoder):
    mlp_modules = []

    if isinstance(text_encoder, (CLIPTextModel, CLIPTextModelWithProjection)):
        for i, layer in enumerate(text_encoder.text_model.encoder.layers):
            mlp_mod = layer.mlp
            name = f"text_model.encoder.layers.{i}.mlp"
            mlp_modules.append((name, mlp_mod))
    else:
        raise ValueError(f"do not know how to get mlp modules for: {text_encoder.__class__.__name__}")

    return mlp_modules


def adjust_lora_scale_text_encoder(text_encoder, lora_scale: float = 1.0):
    # Propagate a new LoRA scale to every patched attention and MLP projection of a CLIP text encoder.
    for _, attn_module in text_encoder_attn_modules(text_encoder):
        if isinstance(attn_module.q_proj, PatchedLoraProjection):
            attn_module.q_proj.lora_scale = lora_scale
            attn_module.k_proj.lora_scale = lora_scale
            attn_module.v_proj.lora_scale = lora_scale
            attn_module.out_proj.lora_scale = lora_scale

    for _, mlp_module in text_encoder_mlp_modules(text_encoder):
        if isinstance(mlp_module.fc1, PatchedLoraProjection):
            mlp_module.fc1.lora_scale = lora_scale
            mlp_module.fc2.lora_scale = lora_scale


class PatchedLoraProjection(torch.nn.Module):
    def __init__(self, regular_linear_layer, lora_scale=1, network_alpha=None, rank=4, dtype=None):
        deprecation_message = "Use of `PatchedLoraProjection` is deprecated. Please switch to PEFT backend by installing PEFT: `pip install peft`."
        deprecate("PatchedLoraProjection", "1.0.0", deprecation_message)

        super().__init__()
        from ..models.lora import LoRALinearLayer

        self.regular_linear_layer = regular_linear_layer

        device = self.regular_linear_layer.weight.device

        if dtype is None:
            dtype = self.regular_linear_layer.weight.dtype

        self.lora_linear_layer = LoRALinearLayer(
            self.regular_linear_layer.in_features,
            self.regular_linear_layer.out_features,
            network_alpha=network_alpha,
            device=device,
            dtype=dtype,
            rank=rank,
        )

        self.lora_scale = lora_scale

    # overwrite PyTorch's `state_dict` so that only the `regular_linear_layer` weights are saved
    def state_dict(self, *args, destination=None, prefix="", keep_vars=False):
        if self.lora_linear_layer is None:
            return self.regular_linear_layer.state_dict(
                *args, destination=destination, prefix=prefix, keep_vars=keep_vars
            )

        return super().state_dict(*args, destination=destination, prefix=prefix, keep_vars=keep_vars)

    def _fuse_lora(self, lora_scale=1.0, safe_fusing=False):
        if self.lora_linear_layer is None:
            return

        dtype, device = self.regular_linear_layer.weight.data.dtype, self.regular_linear_layer.weight.data.device

        w_orig = self.regular_linear_layer.weight.data.float()
        w_up = self.lora_linear_layer.up.weight.data.float()
        w_down = self.lora_linear_layer.down.weight.data.float()

        if self.lora_linear_layer.network_alpha is not None:
            w_up = w_up * self.lora_linear_layer.network_alpha / self.lora_linear_layer.rank

        fused_weight = w_orig + (lora_scale * torch.bmm(w_up[None, :], w_down[None, :])[0])

        if safe_fusing and torch.isnan(fused_weight).any().item():
            raise ValueError(
                "This LoRA weight seems to be broken. "
                f"Encountered NaN values when trying to fuse LoRA weights for {self}."
                "LoRA weights will not be fused."
            )

        self.regular_linear_layer.weight.data = fused_weight.to(device=device, dtype=dtype)

        # the LoRA layer can be dropped now
        self.lora_linear_layer = None

        # offload the up and down matrices to the CPU so they do not blow up memory
        self.w_up = w_up.cpu()
        self.w_down = w_down.cpu()
        self.lora_scale = lora_scale

    def _unfuse_lora(self):
        if not (getattr(self, "w_up", None) is not None and getattr(self, "w_down", None) is not None):
            return

        fused_weight = self.regular_linear_layer.weight.data
        dtype, device = fused_weight.dtype, fused_weight.device

        w_up = self.w_up.to(device=device).float()
        w_down = self.w_down.to(device).float()

        unfused_weight = fused_weight.float() - (self.lora_scale * torch.bmm(w_up[None, :], w_down[None, :])[0])
        self.regular_linear_layer.weight.data = unfused_weight.to(device=device, dtype=dtype)

        self.w_up = None
        self.w_down = None

    def forward(self, input):
        if self.lora_scale is None:
            self.lora_scale = 1.0
        if self.lora_linear_layer is None:
            return self.regular_linear_layer(input)
        return self.regular_linear_layer(input) + (self.lora_scale * self.lora_linear_layer(input))


class LoRALinearLayer(nn.Module):
    r"""
    A linear layer that is used with LoRA.

    Parameters:
        in_features (`int`):
            Number of input features.
        out_features (`int`):
            Number of output features.
        rank (`int`, `optional`, defaults to 4):
            The rank of the LoRA layer.
        network_alpha (`float`, `optional`, defaults to `None`):
            The value of the network alpha used for stable learning and preventing underflow. This value has the same
            meaning as the `--network_alpha` option in the kohya-ss trainer script. See
            https://github.com/darkstorm2150/sd-scripts/blob/main/docs/train_network_README-en.md#execute-learning
        device (`torch.device`, `optional`, defaults to `None`):
            The device to use for the layer's weights.
        dtype (`torch.dtype`, `optional`, defaults to `None`):
            The dtype to use for the layer's weights.
    """

    def __init__(
        self,
        in_features: int,
        out_features: int,
        rank: int = 4,
        network_alpha: Optional[float] = None,
        device: Optional[Union[torch.device, str]] = None,
        dtype: Optional[torch.dtype] = None,
    ):
        super().__init__()

        deprecation_message = "Use of `LoRALinearLayer` is deprecated. Please switch to PEFT backend by installing PEFT: `pip install peft`."
        deprecate("LoRALinearLayer", "1.0.0", deprecation_message)

        self.down = nn.Linear(in_features, rank, bias=False, device=device, dtype=dtype)
        self.up = nn.Linear(rank, out_features, bias=False, device=device, dtype=dtype)
        self.network_alpha = network_alpha
        self.rank = rank
        self.out_features = out_features
        self.in_features = in_features

        # `down` gets a small random init, `up` starts at zero so the LoRA branch is
        # initially a no-op on top of the frozen base layer.
        nn.init.normal_(self.down.weight, std=1 / rank)
        nn.init.zeros_(self.up.weight)

    def forward(self, hidden_states: torch.Tensor) -> torch.Tensor:
        orig_dtype = hidden_states.dtype
        dtype = self.down.weight.dtype

        down_hidden_states = self.down(hidden_states.to(dtype))
        up_hidden_states = self.up(down_hidden_states)

        if self.network_alpha is not None:
            up_hidden_states *= self.network_alpha / self.rank

        return up_hidden_states.to(orig_dtype)


class LoRAConv2dLayer(nn.Module):
    r"""
    A convolutional layer that is used with LoRA.

    Parameters:
        in_features (`int`):
            Number of input features.
        out_features (`int`):
            Number of output features.
        rank (`int`, `optional`, defaults to 4):
            The rank of the LoRA layer.
        kernel_size (`int` or `tuple` of two `int`, `optional`, defaults to 1):
            The kernel size of the convolution.
        stride (`int` or `tuple` of two `int`, `optional`, defaults to 1):
            The stride of the convolution.
        padding (`int` or `tuple` of two `int` or `str`, `optional`, defaults to 0):
            The padding of the convolution.
        network_alpha (`float`, `optional`, defaults to `None`):
            The value of the network alpha used for stable learning and preventing underflow. This value has the same
            meaning as the `--network_alpha` option in the kohya-ss trainer script. See
            https://github.com/darkstorm2150/sd-scripts/blob/main/docs/train_network_README-en.md#execute-learning
    """

    def __init__(
        self,
        in_features: int,
        out_features: int,
        rank: int = 4,
        kernel_size: Union[int, Tuple[int, int]] = (1, 1),
        stride: Union[int, Tuple[int, int]] = (1, 1),
        padding: Union[int, Tuple[int, int], str] = 0,
        network_alpha: Optional[float] = None,
    ):
        super().__init__()

        deprecation_message = "Use of `LoRAConv2dLayer` is deprecated. Please switch to PEFT backend by installing PEFT: `pip install peft`."
        deprecate("LoRAConv2dLayer", "1.0.0", deprecation_message)

        self.down = nn.Conv2d(in_features, rank, kernel_size=kernel_size, stride=stride, padding=padding, bias=False)
        # the up projection always uses a 1x1 kernel, so only `down` carries the spatial kernel
        self.up = nn.Conv2d(rank, out_features, kernel_size=(1, 1), stride=(1, 1), bias=False)

        self.network_alpha = network_alpha
        self.rank = rank

        nn.init.normal_(self.down.weight, std=1 / rank)
        nn.init.zeros_(self.up.weight)

    def forward(self, hidden_states: torch.Tensor) -> torch.Tensor:
        orig_dtype = hidden_states.dtype
        dtype = self.down.weight.dtype

        down_hidden_states = self.down(hidden_states.to(dtype))
        up_hidden_states = self.up(down_hidden_states)

        if self.network_alpha is not None:
            up_hidden_states *= self.network_alpha / self.rank

        return up_hidden_states.to(orig_dtype)


class LoRACompatibleConv(nn.Conv2d):
    """
    A convolutional layer that can be used with LoRA.
    """

    def __init__(self, *args, lora_layer: Optional[LoRAConv2dLayer] = None, **kwargs):
        deprecation_message = "Use of `LoRACompatibleConv` is deprecated. Please switch to PEFT backend by installing PEFT: `pip install peft`."
        deprecate("LoRACompatibleConv", "1.0.0", deprecation_message)

        super().__init__(*args, **kwargs)
        self.lora_layer = lora_layer

    def set_lora_layer(self, lora_layer: Optional[LoRAConv2dLayer]):
        deprecation_message = "Use of `set_lora_layer()` is deprecated. Please switch to PEFT backend by installing PEFT: `pip install peft`."
        deprecate("set_lora_layer", "1.0.0", deprecation_message)

        self.lora_layer = lora_layer

    def _fuse_lora(self, lora_scale: float = 1.0, safe_fusing: bool = False):
        if self.lora_layer is None:
            return

        dtype, device = self.weight.data.dtype, self.weight.data.device

        w_orig = self.weight.data.float()
        w_up = self.lora_layer.up.weight.data.float()
        w_down = self.lora_layer.down.weight.data.float()

        if self.lora_layer.network_alpha is not None:
            w_up = w_up * self.lora_layer.network_alpha / self.lora_layer.rank

        # collapse the 1x1 up conv and the spatial down conv into a single kernel update
        fusion = torch.mm(w_up.flatten(start_dim=1), w_down.flatten(start_dim=1))
        fusion = fusion.reshape(w_orig.shape)
        fused_weight = w_orig + (lora_scale * fusion)

        if safe_fusing and torch.isnan(fused_weight).any().item():
            raise ValueError(
                "This LoRA weight seems to be broken. "
                f"Encountered NaN values when trying to fuse LoRA weights for {self}."
                "LoRA weights will not be fused."
            )

        self.weight.data = fused_weight.to(device=device, dtype=dtype)

        # the LoRA layer can be dropped now
        self.lora_layer = None

        # offload the up and down matrices to the CPU so they do not blow up memory
        self.w_up = w_up.cpu()
        self.w_down = w_down.cpu()
        self._lora_scale = lora_scale

    def _unfuse_lora(self):
        if not (getattr(self, "w_up", None) is not None and getattr(self, "w_down", None) is not None):
            return

        fused_weight = self.weight.data
        dtype, device = fused_weight.dtype, fused_weight.device

        self.w_up = self.w_up.to(device=device).float()
        self.w_down = self.w_down.to(device).float()

        fusion = torch.mm(self.w_up.flatten(start_dim=1), self.w_down.flatten(start_dim=1))
        fusion = fusion.reshape(fused_weight.shape)
        unfused_weight = fused_weight.float() - (self._lora_scale * fusion)
        self.weight.data = unfused_weight.to(device=device, dtype=dtype)

        self.w_up = None
        self.w_down = None

    def forward(self, hidden_states: torch.Tensor, scale: float = 1.0) -> torch.Tensor:
        if self.padding_mode != "zeros":
            hidden_states = F.pad(hidden_states, self._reversed_padding_repeated_twice, mode=self.padding_mode)
            padding = (0, 0)
        else:
            padding = self.padding

        original_outputs = F.conv2d(
            hidden_states, self.weight, self.bias, self.stride, padding, self.dilation, self.groups
        )

        if self.lora_layer is None:
            return original_outputs
        else:
            return original_outputs + (scale * self.lora_layer(hidden_states))


class LoRACompatibleLinear(nn.Linear):
    """
    A Linear layer that can be used with LoRA.
    """

    def __init__(self, *args, lora_layer: Optional[LoRALinearLayer] = None, **kwargs):
        deprecation_message = "Use of `LoRACompatibleLinear` is deprecated. Please switch to PEFT backend by installing PEFT: `pip install peft`."
        deprecate("LoRACompatibleLinear", "1.0.0", deprecation_message)

        super().__init__(*args, **kwargs)
        self.lora_layer = lora_layer

    def set_lora_layer(self, lora_layer: Optional[LoRALinearLayer]):
        deprecation_message = "Use of `set_lora_layer()` is deprecated. Please switch to PEFT backend by installing PEFT: `pip install peft`."
        deprecate("set_lora_layer", "1.0.0", deprecation_message)

        self.lora_layer = lora_layer

    def _fuse_lora(self, lora_scale: float = 1.0, safe_fusing: bool = False):
        if self.lora_layer is None:
            return

        dtype, device = self.weight.data.dtype, self.weight.data.device

        w_orig = self.weight.data.float()
        w_up = self.lora_layer.up.weight.data.float()
        w_down = self.lora_layer.down.weight.data.float()

        if self.lora_layer.network_alpha is not None:
            w_up = w_up * self.lora_layer.network_alpha / self.lora_layer.rank

        fused_weight = w_orig + (lora_scale * torch.bmm(w_up[None, :], w_down[None, :])[0])

        if safe_fusing and torch.isnan(fused_weight).any().item():
            raise ValueError(
                "This LoRA weight seems to be broken. "
                f"Encountered NaN values when trying to fuse LoRA weights for {self}."
                "LoRA weights will not be fused."
            )

        self.weight.data = fused_weight.to(device=device, dtype=dtype)

        # the LoRA layer can be dropped now
        self.lora_layer = None

        # offload the up and down matrices to the CPU so they do not blow up memory
        self.w_up = w_up.cpu()
        self.w_down = w_down.cpu()
        self._lora_scale = lora_scale

    def _unfuse_lora(self):
        if not (getattr(self, "w_up", None) is not None and getattr(self, "w_down", None) is not None):
            return

        fused_weight = self.weight.data
        dtype, device = fused_weight.dtype, fused_weight.device

        w_up = self.w_up.to(device=device).float()
        w_down = self.w_down.to(device).float()

        unfused_weight = fused_weight.float() - (self._lora_scale * torch.bmm(w_up[None, :], w_down[None, :])[0])
        self.weight.data = unfused_weight.to(device=device, dtype=dtype)

        self.w_up = None
        self.w_down = None

    def forward(self, hidden_states: torch.Tensor, scale: float = 1.0) -> torch.Tensor:
        if self.lora_layer is None:
            out = super().forward(hidden_states)
            return out
        else:
            out = super().forward(hidden_states) + (scale * self.lora_layer(hidden_states))
            return out
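

# Minimal sketch, not part of the original file: the linear counterpart. The `scale`
# argument at call time plays the same role as `lora_scale` at fuse time. Feature sizes
# and the helper name are arbitrary example values.
def _example_lora_compatible_linear():
    linear = LoRACompatibleLinear(128, 128)
    linear.set_lora_layer(LoRALinearLayer(128, 128, rank=4))
    x = torch.randn(2, 128)
    out = linear(x, scale=1.0)         # super().forward(x) + scale * lora(x)
    linear._fuse_lora(lora_scale=1.0)  # after fusing, the update lives in linear.weight
    return out, linear(x)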