
    bi7                         d Z ddlZddlZddlmZ ddlmZ ddlmZ ddl	m
Z
mZmZ ddlmZ  ej                  e      Z e       rddlZd	 Zd
 Zddee   fdZ	 ddZd ZddZd Zd ZdeddfdZ	 ddZd Zd Z y)z3
PEFT utilities: Utilities related to peft library
    N)Optional)version   )logging)is_peft_availableis_peft_versionis_torch_available)empty_device_cachec           
         ddl m} d}| j                         D ]  }t        ||      st	        |d      } n |rlddlm} | j                         D cg c]  \  }}d|vs| }}}|D ]8  }	  || |      \  }}	}
t	        |	d      st        ||
|	j                                : | S ddlm} | j                         D ]  \  }}t        t        |j!                                     dkD  rt#        |       d}t        ||      rt        |t$        j&                  j(                        rt$        j&                  j)                  |j*                  |j,                  |j.                  du	      j1                  |j2                  j4                        }|j2                  |_        |j.                  |j.                  |_        d
}nt        ||      rt        |t$        j&                  j6                        rt$        j&                  j7                  |j8                  |j:                  |j<                  |j>                  |j@                  |jB                  |jD                        j1                  |j2                  j4                        }|j2                  |_        |j.                  |j.                  |_        d
}|st        | |       ~tG                 | S c c}}w # t        $ r Y sw xY w)zd
    Recursively replace all instances of `LoraLayer` with corresponding new layers in `model`.
    r   BaseTunerLayerF
base_layer)_get_submoduleslora)	LoraLayerN)biasT)$peft.tuners.tuners_utilsr   modules
isinstancehasattr
peft.utilsr   named_modulesAttributeErrorsetattrget_base_layerpeft.tuners.lorar   named_childrenlenlistchildrenrecurse_remove_peft_layerstorchnnLinearin_featuresout_featuresr   toweightdeviceConv2din_channelsout_channelskernel_sizestridepaddingdilationgroupsr
   )modelr   has_base_layer_patternmoduler   key_key_listparenttargettarget_namer   namemodule_replaced
new_modules                  U/home/cdr/jupyterlab/.venv/lib/python3.12/site-packages/diffusers/utils/peft_utils.pyr!   r!   #   sw    8"--/ fn-%,V\%B"
 .&+&9&9&;QFCvS?PCQQ 	FC.=eS.I+ v|,V-B-B-DE	Fh LU 	/!002 '	%LD&4)*+a/*62#O&),FEHHOO1T"XX__&&''D0 -  "V]]))*	 
 %+MM
!;;*&,kkJO"&FI.:fehhoo3V"XX__&&''&&MMNNOOMM "V]]))*  %+MM
!;;*&,kkJO"&tZ0"$O'	%P Lk R " s   K
 K
-K	KKc                     ddl m} |dk(  ry| j                         D ]   }t        ||      s|j	                  |       " y)z
    Adjust the weightage given to the LoRA layers of the model.

    Args:
        model (`torch.nn.Module`):
            The model to scale.
        weight (`float`):
            The weight to be given to the LoRA layers.
    r   r         ?N)r   r   r   r   scale_layer)r2   r(   r   r4   s       r>   scale_lora_layersrB   j   s>     8}--/ 'fn-v&'    r(   c                     ddl m} ||dk(  ry| j                         D ]I  }t        ||      s|dk7  r|j	                  |       '|j
                  D ]  }|j                  |d        K y)a  
    Removes the previously passed weight given to the LoRA layers of the model.

    Args:
        model (`torch.nn.Module`):
            The model to scale.
        weight (`float`, *optional*):
            The weight to be given to the LoRA layers. If no scale is passed the scale of the lora layer will be
            re-initialized to the correct value. If 0.0 is passed, we will re-initialize the scale with the correct
            value.
    r   r   Nr@   )r   r   r   r   unscale_layeractive_adapters	set_scale)r2   r(   r   r4   adapter_names        r>   unscale_lora_layersrI   ~   sn     8~3--/ 8fn-{$$V,$*$:$: 8L$$\3788rC   c           	         i }i }t        | j                               d   xt        t        | j                                     dkD  rt	        j
                  | j                               j                         d   d   t        t        fd| j                                     }|j                         D 	ci c]  \  }}	|j                  d      d   |	 }}}	|vt        |      dkD  rgt        t        |j                                     dkD  rt	        j
                  |j                               j                         d   d   t        t        fd|j                                     }|rd|j                         D 	ci c]H  \  }}	dj                  |j                  d      d   j                  d            j                  dd	      |	J }}}	n~|j                         D 	ci c];  \  }}	dj                  |j                  d
      d   j                  d      d d       |	= }}}	n't        |j                               j                         t        |j                         D 
ch c]  }
|
j                  d      d    c}
      }t        d |D              }t        d |D              }|||||d}|S c c}	}w c c}	}w c c}	}w c c}
w )Nr   r   c                     | d   k7  S Nr    )xrs    r>   <lambda>z!get_peft_kwargs.<locals>.<lambda>   s    QqTQY rC   z.lora_B.c                     | d   k7  S rL   rM   )rN   
lora_alphas    r>   rP   z!get_peft_kwargs.<locals>.<lambda>   s    !A$*2D rC   .z.lora_A.z.alpha z.down.z.lorac              3   $   K   | ]  }d |v  
 yw)lora_magnitude_vectorNrM   .0ks     r>   	<genexpr>z"get_peft_kwargs.<locals>.<genexpr>   s     IA*a/Is   c              3   J   K   | ]  }d |v xr |j                  d        yw)lora_Bz.biasN)endswithrX   s     r>   r[   z"get_peft_kwargs.<locals>.<genexpr>   s%     SaHM9ajj&99Ss   !#)rO   rR   rank_patternalpha_patterntarget_modulesuse_dora	lora_bias)r   valuesr   setcollectionsCountermost_commondictfilteritemssplitjoinreplacepopkeysany)	rank_dictnetwork_alpha_dictpeft_state_dictis_unetmodel_state_dictrH   r_   r`   rZ   vr;   ra   rb   rc   lora_config_kwargsrR   rO   s                  @@r>   get_peft_kwargsry      s    LM)**,-a00A

3y!"#a'	 0 0 23??A!DQG F#6	8IJK>J>P>P>RSda
+A.1SS%#.@*AA*Es%,,./014$,,-?-F-F-HIUUWXYZ[\]J !(DFXF^F^F`!abM !. 3 3 5!1 HHQWWZ0399#>?GGRTUWXX! !
 `m_r_r_t uW[WXZ[!''(*;A*>*D*DS*I#2*N!OQR!R u u/6689==?Jo>R>R>TUd4::g.q1UVNIIIHS?SSI  $&( E T!
 !v Vs   :J>AK*A K
,Kc                     ddl m} | j                         D ](  }t        ||      sdt	        |j
                         c S  y)Nr   r   default_	default_0)r   r   r   r   r   rO   )r2   r   r4   s      r>   get_adapter_namer}      s>    7--/ .fn-c&((m_--. rC   c                     ddl m} | j                         D ]6  }t        ||      st	        |d      r|j                  |       /| |_        8 y )Nr   r   enable_adapters)enabled)r   r   r   r   r   r   disable_adapters)r2   r   r   r4   s       r>   set_adapter_layersr      sJ    7--/ 6fn-v01&&w&7.5+'6rC   c                 V   ddl m} | j                         D ]7  }t        ||      st	        |d      r|j                  |       .t        d       t        | dd      rLt	        | d      r?| j                  j                  |d        t        | j                        dk(  r
| `d | _        y y y y )Nr   r   delete_adapterzdThe version of PEFT you are using is not compatible, please use a version that is greater than 0.6.1_hf_peft_config_loadedFpeft_config)r   r   r   r   r   r   
ValueErrorgetattrr   ro   r   r   )r2   rH   r   r4   s       r>   delete_adapter_layersr      s    7--/ fn-v/0%%l3 z  u.675-;XlD1 u  !Q&!+/E( '	 <Y6rC   c           	         ddl m} d }| j                         D ]d  \  }}t        ||      st	        |d      r|j                  |       n||_        t        ||      D ]  \  }}|j                  | |||               f y )Nr   r   c                     t        | t              s| S | j                         D ]  \  }}||v s|c S  |j                  d      }|d    d|d    d|d    }| j	                  |d      }|S )NrS   r   r   z.attentions.   r@   )r   ri   rk   rl   get)weight_for_adaptermodule_name
layer_nameweight_partsr5   block_weights          r>   get_module_weightz<set_weights_and_activate_adapters.<locals>.get_module_weight   s    ,d3%%#5#;#;#= 	J[(	 !!#&q
!E!H:\%(<)--c37rC   set_adapter)	r   r   r   r   r   r   active_adapterziprG   )	r2   adapter_namesweightsr   r   r   r4   rH   r(   s	            r>   !set_weights_and_activate_adaptersr      s    7   %224 
WVfn-v}-""=1(5% ),M7(C W$f  /@/UVW
WrC   min_versionreturnc                     t               st        d      t        j                  t        j
                  j                  d            t        j                  |       kD  }|st        d|        y)z
    Checks if the version of PEFT is compatible.

    Args:
        version (`str`):
            The version of PEFT to check against.
    z@PEFT is not installed. Please install it with `pip install peft`peftz_The version of PEFT you are using is not compatible, please use a version that is greater than N)r   r   r   parse	importlibmetadata)r   is_peft_version_compatibles     r>   check_peft_versionr     sl     [\\!(y/A/A/I/I&/Q!RU\UbUbcnUo!o% M#
 	
 &rC   c                    ddl m} ||}nt        ||| |||      }t        |       d|v r|d   rt	        dd      rt        d      d|v r|d   rt	        d	d
      rt        d      	  |di |S # t        $ r}	t        d      |	d }	~	ww xY w)Nr   )
LoraConfig)rs   rt   ru   rv   rH   rb   <z0.9.0z,DoRA requires PEFT >= 0.9.0. Please upgrade.rc   z<=z0.13.2z2lora_bias requires PEFT >= 0.14.0. Please upgrade.z-`LoraConfig` class could not be instantiated.rM   )r   r   ry   %_maybe_raise_error_for_ambiguous_keysr   r   	TypeError)

state_dictnetwork_alphasr   rank_pattern_dictru   rv   rH   r   rx   es
             r>   _create_lora_configr   *  s      %,-&-%
 **<= '',>z,J3(KLL((-?-L4*QRRP/.// PGHaOPs   )A1 1	B:BBc                    | d   j                         }| d   }t        |j                               D ]N  }|D cg c]
  }||k(  s	| }}|D cg c]  }||v s||k7  s| }}|s5|s8t        dd      sEt	        d       y c c}w c c}w )Nr_   ra   r   z0.14.1zzThere are ambiguous keys present in this LoRA. To load it, please update your `peft` installation - `pip install -U peft`.)copyr   rp   r   r   )configr_   ra   r5   modexact_matchessubstring_matchess          r>   r   r   L  s    .)..0L,-NL%%'(  )7E#*EE,:XScSjSTWZSXX.sH-  Q  FXs   
BB	B	B	B	c                 R   d}| t        | dd       }|r0|D cg c]  }d|v s||v s| }}|rddj                  |       d}t        | dd       }|r3|D cg c]  }d|v s||v s| }}|r|ddj                  |       d	z  }|rt        j                  |       y y c c}w c c}w )
NrT   unexpected_keyslora_zSLoading adapter weights from state_dict led to unexpected keys found in the model: z, z. missing_keyszJLoading adapter weights from state_dict led to missing keys in the model: rS   )r   rm   loggerwarning)incompatible_keysrH   warn_msgr   rZ   lora_unexpected_keysr   lora_missing_keyss           r>   _maybe_warn_for_unhandled_keysr   `  s    H$!"35FM/>#e!'Q,S_cdSdA#e #e#		"678<  0.$G,8 _qGqL\]^M^ _ _ 		"345Q8
 x  # $f !`s!   	BBB	B$!B$&B$)N)TNN)T)!__doc__rf   r   typingr   	packagingr   rT   r   import_utilsr   r   r	   torch_utilsr
   
get_logger__name__r   r"   r!   rB   floatrI   ry   r}   r   r   r   strr   r   r   r   rM   rC   r>   <module>r      s         P P + 
		H	%DN'(8x 88 gk/d	60,W@
C 
D 
* ptPD(!rC   