
import copy
import inspect
import json
import os
from pathlib import Path
from typing import Callable, Dict, List, Optional, Union

import safetensors
import torch
from huggingface_hub import model_info
from huggingface_hub.constants import HF_HUB_OFFLINE

from ..models.modeling_utils import ModelMixin, load_state_dict
from ..utils import (
    USE_PEFT_BACKEND,
    _get_model_file,
    convert_state_dict_to_diffusers,
    convert_state_dict_to_peft,
    delete_adapter_layers,
    deprecate,
    get_adapter_name,
    is_accelerate_available,
    is_peft_available,
    is_peft_version,
    is_transformers_available,
    is_transformers_version,
    logging,
    recurse_remove_peft_layers,
    scale_lora_layers,
    set_adapter_layers,
    set_weights_and_activate_adapters,
)
from ..utils.peft_utils import _create_lora_config
from ..utils.state_dict_utils import _load_sft_state_dict_metadata


if is_transformers_available():
    from transformers import PreTrainedModel

if is_peft_available():
    from peft.tuners.tuners_utils import BaseTunerLayer

if is_accelerate_available():
    from accelerate.hooks import AlignDevicesHook, CpuOffload, remove_hook_from_module


logger = logging.get_logger(__name__)

LORA_WEIGHT_NAME = "pytorch_lora_weights.bin"
LORA_WEIGHT_NAME_SAFE = "pytorch_lora_weights.safetensors"

LORA_ADAPTER_METADATA_KEY = "lora_adapter_metadata"


def fuse_text_encoder_lora(text_encoder, lora_scale=1.0, safe_fusing=False, adapter_names=None):
    """
    Fuses LoRAs for the text encoder.

    Args:
        text_encoder (`torch.nn.Module`):
            The text encoder module to set the adapter layers for. If `None`, it will try to get the `text_encoder`
            attribute.
        lora_scale (`float`, defaults to 1.0):
            Controls how much to influence the outputs with the LoRA parameters.
        safe_fusing (`bool`, defaults to `False`):
            Whether to check fused weights for NaN values before fusing and if values are NaN not fusing them.
        adapter_names (`List[str]` or `str`):
            The names of the adapters to use.
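
    Example (illustrative sketch; assumes the pipeline's text encoder already has PEFT LoRA layers
    injected, e.g. after `pipe.load_lora_weights(...)` with a checkpoint that contains text-encoder
    LoRA weights — the repo and file names below are taken from the other examples in this module):

    ```py
    import torch
    from diffusers import DiffusionPipeline

    pipe = DiffusionPipeline.from_pretrained(
        "stabilityai/stable-diffusion-xl-base-1.0", torch_dtype=torch.float16
    ).to("cuda")
    pipe.load_lora_weights("nerijs/pixel-art-xl", weight_name="pixel-art-xl.safetensors", adapter_name="pixel")

    # Merge the text-encoder LoRA weights into the base weights at 70% strength,
    # checking for NaNs before committing the merge.
    fuse_text_encoder_lora(pipe.text_encoder, lora_scale=0.7, safe_fusing=True)
    ```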
    
    """
    merge_kwargs = {"safe_merge": safe_fusing}

    for module in text_encoder.modules():
        if isinstance(module, BaseTunerLayer):
            if lora_scale != 1.0:
                module.scale_layer(lora_scale)

            # For BC with previous PEFT versions, check whether `merge` accepts the
            # `adapter_names` argument before forwarding it.
            supported_merge_kwargs = list(inspect.signature(module.merge).parameters)
            if "adapter_names" in supported_merge_kwargs:
                merge_kwargs["adapter_names"] = adapter_names
            elif "adapter_names" not in supported_merge_kwargs and adapter_names is not None:
                raise ValueError(
                    "The `adapter_names` argument is not supported with your PEFT version. Please upgrade"
                    " to the latest version of PEFT. `pip install -U peft`"
                )

            module.merge(**merge_kwargs)


def unfuse_text_encoder_lora(text_encoder):
    """
    Unfuses LoRAs for the text encoder.

    Args:
        text_encoder (`torch.nn.Module`):
            The text encoder module to set the adapter layers for. If `None`, it will try to get the `text_encoder`
            attribute.
    N)r,   r-   r"   unmerge)r5   r9   s     r;   unfuse_text_encoder_lorar@   h   s0     &&( fn-NNr=   r*   r5   r!   text_encoder_weightsc                 z    |t        d      d }t        | t              r| gn| }  || |      }t        || |       y)a  
    Sets the adapter layers for the text encoder.

    Args:
        adapter_names (`List[str]` or `str`):
            The names of the adapters to use.
        text_encoder (`torch.nn.Module`, *optional*):
            The text encoder module to set the adapter layers for. If `None`, it will try to get the `text_encoder`
            attribute.
        text_encoder_weights (`List[float]`, *optional*):
            The weights to use for the text encoder. If `None`, the weights are set to `1.0` for all the adapters.
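
    Example (illustrative sketch; assumes two adapters named "cinematic" and "pixel" have already
    been loaded into `pipeline.text_encoder`, mirroring the pipeline-level `set_adapters` example
    further below):

    ```py
    # Activate both adapters on the text encoder with different weights.
    set_adapters_for_text_encoder(
        ["cinematic", "pixel"],
        text_encoder=pipeline.text_encoder,
        text_encoder_weights=[0.5, 1.0],
    )
    ```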
    """
    if text_encoder is None:
        raise ValueError(
            "The pipeline does not have a default `pipe.text_encoder` class. Please make sure to pass a `text_encoder` instead."
        )

    def process_weights(adapter_names, weights):
        # Expand a scalar (or `None`) weight so that every adapter gets its own entry.
        if not isinstance(weights, list):
            weights = [weights] * len(adapter_names)

        if len(adapter_names) != len(weights):
            raise ValueError(
                f"Length of adapter names {len(adapter_names)} is not equal to the length of the weights {len(weights)}"
            )

        # Set `None` values to the default of 1.0.
        weights = [w if w is not None else 1.0 for w in weights]

        return weights

    adapter_names = [adapter_names] if isinstance(adapter_names, str) else adapter_names
    text_encoder_weights = process_weights(adapter_names, text_encoder_weights)
    set_weights_and_activate_adapters(text_encoder, adapter_names, text_encoder_weights)


def disable_lora_for_text_encoder(text_encoder: Optional["PreTrainedModel"] = None):
    """
    Disables the LoRA layers for the text encoder.

    Args:
        text_encoder (`torch.nn.Module`, *optional*):
            The text encoder module to disable the LoRA layers for. If `None`, it will try to get the `text_encoder`
            attribute.
    """
    if text_encoder is None:
        raise ValueError("Text Encoder not found.")
    set_adapter_layers(text_encoder, enabled=False)


def enable_lora_for_text_encoder(text_encoder: Optional["PreTrainedModel"] = None):
    """
    Enables the LoRA layers for the text encoder.

    Args:
        text_encoder (`torch.nn.Module`, *optional*):
            The text encoder module to enable the LoRA layers for. If `None`, it will try to get the `text_encoder`
            attribute.
    """
    if text_encoder is None:
        raise ValueError("Text Encoder not found.")
    set_adapter_layers(text_encoder, enabled=True)


def _remove_text_encoder_monkey_patch(text_encoder):
    recurse_remove_peft_layers(text_encoder)
    if getattr(text_encoder, "peft_config", None) is not None:
        del text_encoder.peft_config
        text_encoder._hf_peft_config_loaded = None
        |||||||	|

      }t        j                  j                  |d      }t        |      }|:|t        | d|      }t	        | |xs t        |||||||	|

      }t        |      }d }|fS | }||fS # t        t        j                  f$ r}|s|d }d }Y d }~ld }~ww xY w)N.safetensors)file_extensionlocal_files_only)	weights_name	cache_dirforce_downloadproxiesr]   tokenrevision	subfolder
user_agentcpu)devicez.bin)r-   dictendswith_best_guess_weight_namer   LORA_WEIGHT_NAME_SAFEsafetensorstorch	load_filer    IOErrorSafetensorErrorLORA_WEIGHT_NAMEr   )%pretrained_model_name_or_path_or_dictweight_nameuse_safetensorsr]   r_   r`   ra   rb   rc   rd   re   allow_picklemetadata
model_file
state_dictes                   r;   _fetch_state_dictrz      sX    J;TB 3#(<(<^(L &"9='5)9#K
 -9!,!E0E'#1#%5%')
 )..88E8R
8D "59&cs )5(<,<#-!1!#%J )4JH x ;
x= [889 #G!
s   AC
 
C5#C00C5c                    |st         rt        d      g }t        j                  j	                  |       ry t        j                  j                  |       r5t        j                  |       D cg c]  }|j                  |      s| }}nJt        |       j                  }|D cg c]*  }|j                  j                  |      s|j                  , }}t        |      dk(  ry h dt        t        fd|            }t        d |D              rt        t        d |            }n(t        d |D              rt        t        d |            }t        |      d	kD  rt        j!                  d
| d|d    d       |d   }|S c c}w c c}w )Nz>When using the offline mode, you must specify a `weight_name`.r   >   	optimizer	scheduler
checkpointc                 .     t         fdD              S )Nc              3   &   K   | ]  }|v 
 y wNr+   ).0	substringxs     r;   	<genexpr>z<_best_guess_weight_name.<locals>.<lambda>.<locals>.<genexpr>+  s     VIYa/V   )all)r   unallowed_substringss   `r;   <lambda>z)_best_guess_weight_name.<locals>.<lambda>+  s    VAUVV r=   c              3   F   K   | ]  }|j                  t                y wr   ri   rq   r   fs     r;   r   z*_best_guess_weight_name.<locals>.<genexpr>.  s     
@A1::&'
@   !c                 ,    | j                  t              S r   r   r   s    r;   r   z)_best_guess_weight_name.<locals>.<lambda>/  s    qzz:J/K r=   c              3   F   K   | ]  }|j                  t                y wr   ri   rk   r   s     r;   r   z*_best_guess_weight_name.<locals>.<genexpr>0  s     G1QZZ-.Gr   c                 ,    | j                  t              S r   r   r   s    r;   r   z)_best_guess_weight_name.<locals>.<lambda>1  s    qzz:O/P r=      z9Provided path contains more than one weights file in the z
 format. `z]` is going to be loaded, for precise control, specify a `weight_name` in `load_lora_weights`.)r
   r4   ospathisfileisdirlistdirri   r	   siblings	rfilenamerF   r/   filteranyloggerwarning)rr   r\   r]   targeted_filesr   files_in_repors   r   s          @r;   rj   rj     ss    >YZZN	ww~~;<	<	=%'ZZ0U%VuZ[ZdZdesZt!uu"#HIRR/<e!@T@TUc@d!++ee
>a
 DVXfgN 
@
@@f%K^\]	GG	Gf%PR`ab
>QGGWWabpqrbsat  uR  S	
 !#K3 v fs   -FF$ F
F
c                 b    | j                         D ci c]  \  }}| d| | }}}|S c c}}w )N.)items)rx   prefixkeyvaluesd_with_prefixs        r;   _pack_dict_with_prefixr   ;  s>    AKAQAQAST:3#'.TNT Us   +hotswapc           	         ddl m} t        st        d      |r|
rt        d      i }|r3t	        dd      st        d      t        dd	      st        d
      ||d<   |n|}|	r-t        fd| j                         D              rt        d      || j                         D ci c]/  \  }}|j                  | d      s|j                  | d      |1 } }}|
J|
j                         D ci c]/  \  }}|j                  | d      s|j                  | d      |1 }
}}t        |       dkD  rt        j                  d| d       i }t        |       } t        |       } |j!                         D ]6  \  }}|j#                  d      s| d}|| v s"| |   j$                  d   ||<   8 ||j                         D cg c].  }|j                  |      s|j'                  d      d   |k(  s-|0 }}|j                         D ci c]  \  }}||v s|j                  | d      |! }}}t)        | ||
|d      }|t+        |      }t-        |      \  }}} |j.                  d|| |d| t1        ||       |j3                  |j4                  |j6                         |r|j9                          na|r|j;                          nN|rL|j<                  j?                         D ]/  }tA        |tB        jD                  jF                        s( ||       1 |9| s6|jH                  jJ                  }t        jM                  d| d|d| d       y y y c c}}w c c}}w c c}w c c}}w )Nr   )*_maybe_remove_and_reapply_group_offloading)PEFT backend is required for this method.zJ`network_alphas` and `metadata` cannot be specified both at the same time.z>=z0.13.1zq`low_cpu_mem_usage=True` is not compatible with this `peft` version. Please update it with `pip install -U peft`.>z4.45.2z`low_cpu_mem_usage=True` is not compatible with this `transformers` version. Please update it with `pip install -U transformers`.low_cpu_mem_usagec              3   &   K   | ]  }|v  
 y wr   r+   )r   r   text_encoder_names     r;   r   z/_load_lora_into_text_encoder.<locals>.<genexpr>i  s     MC(C/Mr   z[At the moment, hotswapping is not supported for text encoders, please pass `hotswap=False`.r   r   zLoading )z.q_projz.k_projz.v_projz	.out_projz.fc1z.fc2z.lora_B.weightr   F)is_unet)adapter_nameadapter_state_dictrV   )weight)rg   dtypezNo LoRA keys associated to z found with the prefix=zG. This is safe to ignore if LoRA state dict didn't originally have any z related params. You can also try specifying `prefix=None` to resolve the warning. Otherwise, open an issue if you think it's unexpected: https://github.com/huggingface/diffusers/issues/newr+   )'hooks.group_offloadingr   r   r4   r   r   r   keysr   
startswithremoveprefixrF   r   infor   r   named_modulesri   shapesplitr   r   #_func_optionally_disable_offloadingload_adapterr   torg   r   enable_model_cpu_offloadenable_sequential_cpu_offload
componentsvaluesr-   rm   nnModule	__class____name__r   )rx   network_alphasr5   r   r6   r   r   	_pipeliner   r   rv   r   peft_kwargskvrankname_rank_key
alpha_keyslora_configis_model_cpu_offloadis_sequential_cpu_offloadis_group_offload	componentmodel_class_names        `                    r;   _load_lora_into_text_encoderr   @  s    TDEE(effKtX. D  'sH5  T  ,='(
 #).fF 3M:??;LMMvww BLBRBRBTs$!QXYXdXdhngoopeqXrannxq\2A5s
sDLNNDTsDAqXYXdXdhngoopeqXr&|4a7sHs
:hvha()4Z@
 0
;
#113 	CGD!}}[\"V>2z)%/%9%?%?%BDN		C %%3%8%8%:qall6>RWXW^W^_bWcdeWfjpWp!qJqJXJ^J^J`t$!Qdeisdsannxq\:A=tNt **nhPT^cd +L9L MpM
I79I
 	"!! 	
%)#	
 		
 	,z:|22,:L:LM  ..0&335&1188: J	i9>yIJ
 *'11::)*:);;SF9 UT  !BB	
 #-s ts" rts6   )MM5M$M$	M* M*8M*M/M/c                    ddl m} d}d}d}| O| j                  B| j                  j	                         D ]  \  }}t        |t        j                        s!|xs  ||      }t        |d      s:|xs t        |j                  t              }|xs[ t        |j                  t              xs? t        |j                  d      xr' t        |j                  j                  d   t              } |s|rkt        j                  d       | j                  j	                         D ]9  \  }}t        |t        j                        rt        |d      s-t        ||       ; |||fS )	ah  
def _func_optionally_disable_offloading(_pipeline):
    """
    Optionally removes offloading in case the pipeline has been already sequentially offloaded to CPU.

    Args:
        _pipeline (`DiffusionPipeline`):
            The pipeline to disable offloading for.

    Returns:
        tuple:
            A tuple indicating if `is_model_cpu_offload` or `is_sequential_cpu_offload` or `is_group_offload` is True.
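
    Example (illustrative sketch of how the returned flags are typically consumed; `_pipeline` is
    assumed to be a loaded `DiffusionPipeline`):

    ```py
    is_model_cpu_offload, is_sequential_cpu_offload, is_group_offload = _func_optionally_disable_offloading(_pipeline)
    # ... load or modify the LoRA layers while the accelerate hooks are removed ...
    if is_model_cpu_offload:
        _pipeline.enable_model_cpu_offload()
    elif is_sequential_cpu_offload:
        _pipeline.enable_sequential_cpu_offload()
    ```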
    r   )_is_group_offload_enabledF_hf_hookhooksr   zAccelerate hooks detected. Since you have called `load_lora_weights()`, the previous hooks will be first removed. Then the LoRA parameters will be loaded and the hooks will be applied again.)recurse)r   r   hf_device_mapr   r   r-   r   r   hasattrr   r$   r#   r   r   r   r%   )r   r   r   r   r   r   r   s          r;   r   r     sb    C  %!8!8!@%00668 	LAyi3/W3LY3W9j1#7#e:iFXFXZd;e (A )9--/?@ N9--w7 Ny1177:<LM &	 %(<KK Q !* 4 4 : : < V9!)RYY7wyR\?]'	;TUV
 !";=MNNr=   c                       e Zd ZdZg Z e       Zedefd       Z	ed        Z
ed        Zd Zed        Zed        Zd	 Zg d
ddfdee   dededeee      fdZg fdee   fdZ	 d'deee   ef   deeeeee   ee   f      fdZd Zd Zdeee   ef   fdZdee   fdZdeeee   f   fdZdee   deej@                  ee!f   ddfdZ"d(dZ#e$d        Z%e$	 d'deeejL                  f   ded ed!ed"e'd#ed$ee(   fd%       Z)ed&        Z*y))LoraBaseMixinz!Utility class for handling LoRAs.returnc                 6    t        | d      r| j                  S dS )z
        Returns the LoRA scale, which can be set at run time by the pipeline. If `_lora_scale` has not
        been set, returns 1.
        _lora_scaler)   )r   r   selfs    r;   r6   zLoraBaseMixin.lora_scale  s     $+4#?tHSHr=   c                 ,    t        | j                        S )z1Returns the number of LoRAs that have been fused.)rF   _merged_adaptersr   s    r;   num_fused_loraszLoraBaseMixin.num_fused_loras  s     4(())r=   c                     | j                   S )z0Returns names of the LoRAs that have been fused.)r   r   s    r;   fused_loraszLoraBaseMixin.fused_loras  s     $$$r=   c                     t        d      )Nz)`load_lora_weights()` is not implemented.NotImplementedError)r   kwargss     r;   load_lora_weightszLoraBaseMixin.load_lora_weights  s    !"MNNr=   c                     t        d      )Nz&`save_lora_weights()` not implemented.r   clsr   s     r;   save_lora_weightszLoraBaseMixin.save_lora_weights  s    !"JKKr=   c                     t        d      )Nz'`lora_state_dict()` is not implemented.r   r   s     r;   lora_state_dictzLoraBaseMixin.lora_state_dict  s    !"KLLr=   c                 
   t         st        d      | j                  D ]c  }t        | |d      }|t	        |j
                  t              r|j                          >t	        |j
                  t              sYt        |       e y)z
        Unloads the LoRA parameters.

        Examples:

        ```python
        >>> # Assuming `pipeline` is already loaded with the LoRA parameters.
        >>> pipeline.unload_lora_weights()
        >>> ...
        ```
        r   N)
r   r4   _lora_loadable_modulesrW   
issubclassr   r   unload_lorar!   rY   r   r   models      r;   unload_lora_weightsz!LoraBaseMixin.unload_lora_weights  sl      HII44 	=ID)T2E eooz:%%'A5e<	=r=   r)   FNr   r6   r7   r*   c                 @   d|v rd}t        dd|       d|v rd}t        dd|       d|v rd}t        dd|       t        |      dk(  rt        d	      t               }|D ](  }|| j                  vrt        | d
| j                  d      t        | |d      }	|	=t        |	j                  t              r^|	j                  |||       |	j                         D ]7  }
t        |
t              s|j                  t        |
j                               9 t        |	j                  t              st!        |	|||       |	j                         D ]7  }
t        |
t              s|j                  t        |
j                               9 + | j"                  |z  | _        y)a  
        Fuses the LoRA parameters into the original parameters of the corresponding blocks.

        <Tip warning={true}>

        This is an experimental API.

        </Tip>

        Args:
            components: (`List[str]`): List of LoRA-injectable components to fuse the LoRAs into.
            lora_scale (`float`, defaults to 1.0):
                Controls how much to influence the outputs with the LoRA parameters.
            safe_fusing (`bool`, defaults to `False`):
                Whether to check fused weights for NaN values before fusing and if values are NaN not fusing them.
            adapter_names (`List[str]`, *optional*):
                Adapter names to be used for fusing. If nothing is passed, all active adapters will be fused.

        Example:

        ```py
        from diffusers import DiffusionPipeline
        import torch

        pipeline = DiffusionPipeline.from_pretrained(
            "stabilityai/stable-diffusion-xl-base-1.0", torch_dtype=torch.float16
        ).to("cuda")
        pipeline.load_lora_weights("nerijs/pixel-art-xl", weight_name="pixel-art-xl.safetensors", adapter_name="pixel")
        pipeline.fuse_lora(lora_scale=0.7)
        ```
        	fuse_unetzPassing `fuse_unet` to `fuse_lora()` is deprecated and will be ignored. Please use the `components` argument and provide a list of the components whose LoRAs are to be fused. `fuse_unet` will be removed in a future version.1.0.0fuse_transformerzPassing `fuse_transformer` to `fuse_lora()` is deprecated and will be ignored. Please use the `components` argument and provide a list of the components whose LoRAs are to be fused. `fuse_transformer` will be removed in a future version.fuse_text_encoderzPassing `fuse_text_encoder` to `fuse_lora()` is deprecated and will be ignored. Please use the `components` argument and provide a list of the components whose LoRAs are to be fused. `fuse_text_encoder` will be removed in a future version.r   %`components` cannot be an empty list.- is not found in self._lora_loadable_modules=r   N)r7   r*   )r6   r7   r*   )r   rF   r4   setr   rW   r   r   r   	fuse_lorar,   r-   r"   updatemerged_adaptersr!   r<   r   )r   r   r6   r7   r*   r   depr_messagemerged_adapter_namesfuse_componentr   r9   s              r;   r  zLoraBaseMixin.fuse_lora  s   N &  }L
 ' KL"
 &( ML# z?aDEE  #u( 	UNT%@%@@ N#33aTE`E`Dbbc!deeD.$7E eooz:OOJKWdOe"'--/ U%fn=077F<R<R8STU eoo?**+]j #(--/ U%fn=077F<R<R8STU#	U* !% 5 58L Lr=   c                 X   d|v rd}t        dd|       d|v rd}t        dd|       d|v rd}t        dd|       t        |      dk(  rt        d	      |D ]  }|| j                  vrt        | d
| j                  d      t	        | |d      }|<t        |j                  t        t        f      s]|j                         D ]d  }t        |t              st        |j                        D ])  }|s|| j                  v s| j                  |hz
  | _        + |j                          f  y)a  
        Reverses the effect of
        [`pipe.fuse_lora()`](https://huggingface.co/docs/diffusers/main/en/api/loaders#diffusers.loaders.LoraBaseMixin.fuse_lora).

        <Tip warning={true}>

        This is an experimental API.

        </Tip>

        Args:
            components (`List[str]`): List of LoRA-injectable components to unfuse LoRA from.
            unfuse_unet (`bool`, defaults to `True`): Whether to unfuse the UNet LoRA parameters.
            unfuse_text_encoder (`bool`, defaults to `True`):
                Whether to unfuse the text encoder LoRA parameters. If the text encoder wasn't monkey-patched with the
                LoRA parameters then it won't have any effect.
        unfuse_unetzPassing `unfuse_unet` to `unfuse_lora()` is deprecated and will be ignored. Please use the `components` argument. `unfuse_unet` will be removed in a future version.r  unfuse_transformerzPassing `unfuse_transformer` to `unfuse_lora()` is deprecated and will be ignored. Please use the `components` argument. `unfuse_transformer` will be removed in a future version.unfuse_text_encoderzPassing `unfuse_text_encoder` to `unfuse_lora()` is deprecated and will be ignored. Please use the `components` argument. `unfuse_text_encoder` will be removed in a future version.r   r  r  r   N)r   rF   r4   r   rW   r   r   r   r!   r,   r-   r"   r  r
  r   r?   )r   r   r   r  r  r   r9   adapters           r;   unfuse_lorazLoraBaseMixin.unfuse_lorar  sY   $ F" BL
  6) PL$
 !F* RL% z?aDEE( 	-NT%@%@@ N#33aTE`E`Dbbc!deeD.$7E eoo
O/LM"'--/ -%fn=+.v/E/E+F ^#*w$:O:O/O<@<Q<QU\T]<]D$9^ #NN,-	-r=   adapter_weightsc                    t        |t              rt        |j                               }t        | j                        }t        ||z
        }|rNt        j                  d| d| j                   d       |j                         D ci c]  \  }}||vs|| }}}t        |t              r|gn|}t        j                  |      }t        |t              s|gt        |      z  }t        |      t        |      k7  r#t        dt        |       dt        |             | j                         }|j!                         D 	
ch c]  }	|	D ]  }
|
  }}	}
t        |      |z
  }t        |      dkD  rt        d| d| d	      |D 
	ci c]+  }
|
|j                         D 	cg c]  \  }}	|
|	v s| c}	}- }}}
}	i }| j                  D ]  }t#        | |d
      }|t%        ||      D ]  \  }}t        |t              rG|j'                  |d
      }|5|||   vr.t        j                  d| d| d| d| d| d||    d	       n|}|j)                  |g        ||   j+                  |        t-        |j.                  t0              r|j3                  |||          t-        |j.                  t4              st7        ||||           y
c c}}w c c}
}	w c c}	}w c c}	}}
w )a  
        Set the currently active adapters for use in the pipeline.

        Args:
            adapter_names (`List[str]` or `str`):
                The names of the adapters to use.
            adapter_weights (`Union[List[float], float]`, *optional*):
                The adapter(s) weights to use with the UNet. If `None`, the weights are set to `1.0` for all the
                adapters.

        Example:

        ```py
        from diffusers import AutoPipelineForText2Image
        import torch

        pipeline = AutoPipelineForText2Image.from_pretrained(
            "stabilityai/stable-diffusion-xl-base-1.0", torch_dtype=torch.float16
        ).to("cuda")
        pipeline.load_lora_weights(
            "jbilcke-hf/sdxl-cinematic-1", weight_name="pytorch_lora_weights.safetensors", adapter_name="cinematic"
        )
        pipeline.load_lora_weights("nerijs/pixel-art-xl", weight_name="pixel-art-xl.safetensors", adapter_name="pixel")
        pipeline.set_adapters(["cinematic", "pixel"], adapter_weights=[0.5, 0.5])
        ```
        zLThe following components in `adapter_weights` are not part of the pipeline: z1. Available components that are LoRA-compatible: zN. So, weights belonging to the invalid components will be removed and ignored.rD   rE   r   zAdapter name(s) z& not in the list of present adapters: r   NzLora weight dict for adapter 'z' contains z",but this will be ignored because z does not contain weights for z.Valid parts for z are: )r-   rh   r  r   r   sortedr   r   r   rJ   copydeepcopyr/   rF   r4   get_list_adaptersr   rW   zippop
setdefaultappendr   r   r   set_adaptersr!   rK   )r   r*   r  components_passedlora_componentsinvalid_componentsr   r   list_adaptersadaptersr  all_adaptersmissing_adapterspartinvert_list_adapters_component_adapter_weightsr   r   r   rG   component_adapter_weightss                        r;   r  zLoraBaseMixin.set_adapters  sN   > ot, #O$8$8$: ;!$"="=>O!'(9O(K!L!bcubv wFFJFaFaEb cMM
 5D4I4I4K"kDAqqXjOj1a4"k"k+5mS+I}--8 /40./#m2DDO}_!55*3}+=*>>ijmn}j~i  A  ..00=0D0D0F_HV^_7___}-< 1$"#3"44Z[gZhhij  ( 
  
 1D1D1F^~tX'U]J]d^^ 
  
 &("44 	kID)T2E }),]O)L X%ggt,07It0L-0<RfgsRtAt"@kZcYd eDDP>Qopyoz {33?.G[\hGiFjjk!m 18-*55iD*95<<=VW!X$ %//:6""=2LY2WXEOO_=-mUD^_hDij9	k? #l  ` _ 
s0   KKK
K$#K0K4K$K$c                 
   t         st        d      | j                  D ]c  }t        | |d      }|t	        |j
                  t              r|j                          >t	        |j
                  t              sYt        |       e y)a$  
        Disables the active LoRA layers of the pipeline.

        Example:

        ```py
        from diffusers import AutoPipelineForText2Image
        import torch

        pipeline = AutoPipelineForText2Image.from_pretrained(
            "stabilityai/stable-diffusion-xl-base-1.0", torch_dtype=torch.float16
        ).to("cuda")
        pipeline.load_lora_weights(
            "jbilcke-hf/sdxl-cinematic-1", weight_name="pytorch_lora_weights.safetensors", adapter_name="cinematic"
        )
        pipeline.disable_lora()
        ```
        r   N)
r   r4   r   rW   r   r   r   disable_lorar!   rR   r   s      r;   r+  zLoraBaseMixin.disable_lora  sl    &  HII44 	9ID)T2E eooz:&&(A1%8	9r=   c                 
   t         st        d      | j                  D ]c  }t        | |d      }|t	        |j
                  t              r|j                          >t	        |j
                  t              sYt        |       e y)a"  
        Enables the active LoRA layers of the pipeline.

        Example:

        ```py
        from diffusers import AutoPipelineForText2Image
        import torch

        pipeline = AutoPipelineForText2Image.from_pretrained(
            "stabilityai/stable-diffusion-xl-base-1.0", torch_dtype=torch.float16
        ).to("cuda")
        pipeline.load_lora_weights(
            "jbilcke-hf/sdxl-cinematic-1", weight_name="pytorch_lora_weights.safetensors", adapter_name="cinematic"
        )
        pipeline.enable_lora()
        ```
        r   N)
r   r4   r   rW   r   r   r   enable_lorar!   rT   r   s      r;   r-  zLoraBaseMixin.enable_lora0  sl    &  HII44 	8ID)T2E eooz:%%'A07	8r=   c                 B   t         st        d      t        |t              r|g}| j                  D ]l  }t        | |d      }|t        |j                  t              r|j                  |       ?t        |j                  t              sZ|D ]  }t        ||        n y)a  
        Delete an adapter's LoRA layers from the pipeline.

        Args:
            adapter_names (`Union[List[str], str]`):
                The names of the adapters to delete.

        Example:

        ```py
        from diffusers import AutoPipelineForText2Image
        import torch

        pipeline = AutoPipelineForText2Image.from_pretrained(
            "stabilityai/stable-diffusion-xl-base-1.0", torch_dtype=torch.float16
        ).to("cuda")
        pipeline.load_lora_weights(
            "jbilcke-hf/sdxl-cinematic-1", weight_name="pytorch_lora_weights.safetensors", adapter_name="cinematic"
        )
        pipeline.delete_adapters("cinematic")
        ```
        r   N)r   r4   r-   rJ   r   rW   r   r   r   delete_adaptersr!   r   )r   r*   r   r   r   s        r;   r/  zLoraBaseMixin.delete_adaptersN  s    .  HIImS)*OM44 	CID)T2E eooz:))-8A(5 C-e\BC	Cr=   c                 
   t         st        d      g }| j                  D ]`  }t        | |d      }|t	        |j
                  t              s.|j                         D ]   }t        |t              s|j                  } ` b |S )a  
        Gets the list of the current active adapters.

        Example:

        ```python
        from diffusers import DiffusionPipeline

        pipeline = DiffusionPipeline.from_pretrained(
            "stabilityai/stable-diffusion-xl-base-1.0",
        ).to("cuda")
        pipeline.load_lora_weights("CiroN2022/toy-face", weight_name="toy_face_sdxl.safetensors", adapter_name="toy")
        pipeline.get_active_adapters()
        ```
        iPEFT backend is required for this method. Please install the latest version of PEFT `pip install -U peft`N)r   r4   r   rW   r   r   r   r,   r-   r"   active_adapters)r   r2  r   r   r9   s        r;   get_active_adaptersz!LoraBaseMixin.get_active_adapterst  s       {  44 	ID)T2E Z%L#mmo F!&.9*0*@*@	 r=   c                    t         st        d      i }| j                  D ]f  }t        | |d      }|t	        |j
                  t        t        f      s4t        |d      sAt        |j                  j                               ||<   h |S )zR
        Gets the current list of all available adapters in the pipeline.
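
        Example (illustrative sketch; the adapter and component names are placeholders that follow
        the other examples in this class — the actual keys depend on what has been loaded):

        ```py
        pipeline.load_lora_weights("nerijs/pixel-art-xl", weight_name="pixel-art-xl.safetensors", adapter_name="pixel")
        pipeline.get_list_adapters()
        # e.g. {"unet": ["pixel"], "text_encoder": ["pixel"]}
        ```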
        r1  NrV   )r   r4   r   rW   r   r   r   r!   r   r/   rV   r   )r   r  r   r   s       r;   r  zLoraBaseMixin.get_list_adapters  s      {  44 	IID)T2E!u_0MNE=1*.u/@/@/E/E/G*HY'	I r=   rg   c                     t         st        d      | j                  D ]  }t        | |d      }||j	                         D ]  }t        |t              s|D ]  }||j                  vr|j                  |   j                  |       |j                  |   j                  |       t        |d      s[|j                  h||j                  v sw|j                  |   j                  |      |j                  |<      y)a/  
        Moves the LoRAs listed in `adapter_names` to a target device. Useful for offloading the LoRA to the CPU in case
        you want to load multiple adapters and free some GPU memory.

        After offloading the LoRA adapters to CPU, as long as the rest of the model is still on GPU, the LoRA adapters
        can no longer be used for inference, as that would cause a device mismatch. Remember to set the device back to
        GPU before using those LoRA adapters for inference.

        ```python
        >>> pipe.load_lora_weights(path_1, adapter_name="adapter-1")
        >>> pipe.load_lora_weights(path_2, adapter_name="adapter-2")
        >>> pipe.set_adapters("adapter-1")
        >>> image_1 = pipe(**kwargs)
        >>> # switch to adapter-2, offload adapter-1
        >>> pipeline.set_lora_device(adapter_names=["adapter-1"], device="cpu")
        >>> pipeline.set_lora_device(adapter_names=["adapter-2"], device="cuda:0")
        >>> pipe.set_adapters("adapter-2")
        >>> image_2 = pipe(**kwargs)
        >>> # switch back to adapter-1, offload adapter-2
        >>> pipeline.set_lora_device(adapter_names=["adapter-2"], device="cpu")
        >>> pipeline.set_lora_device(adapter_names=["adapter-1"], device="cuda:0")
        >>> pipe.set_adapters("adapter-1")
        >>> ...
        ```

        Args:
            adapter_names (`List[str]`):
                List of adapters to send device to.
            device (`Union[torch.device, str, int]`):
                Device to send the adapters to. Can be either a torch device, a str or an integer.
        r   Nlora_magnitude_vector)r   r4   r   rW   r,   r-   r"   lora_Ar   lora_Br   r6  )r   r*   rg   r   r   r9   r   s          r;   set_lora_devicezLoraBaseMixin.set_lora_device  s    @  HII44 	1ID)T2E #mmo 1F!&.9,9 1L+6==@ ("MM,7::6B"MM,7::6B&v/FGFLhLhLt#/63O3O#OQWQmQm(4R&&(bj %+$@$@$N11	1r=   c                     | j                   j                         D ]3  \  }}t        |d      s|| j                  v s" |j                  di | 5 y)aH  
        Hotswap adapters without triggering recompilation of a model or if the ranks of the loaded adapters are
        different.

        Args:
            target_rank (`int`):
                The highest rank among all the adapters that will be loaded.
            check_compiled (`str`, *optional*, defaults to `"error"`):
                How to handle a model that is already compiled. The check can return the following messages:
                  - "error" (default): raise an error
                  - "warn": issue a warning
                  - "ignore": do nothing
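
        Example (illustrative sketch; the second checkpoint is a placeholder and `target_rank` is an
        assumed upper bound on the ranks of all LoRAs that will be loaded):

        ```py
        pipe.enable_lora_hotswap(target_rank=64)
        pipe.load_lora_weights("nerijs/pixel-art-xl", weight_name="pixel-art-xl.safetensors", adapter_name="pixel")
        # Later, swap a different LoRA into the same adapter slot without recompilation.
        pipe.load_lora_weights("path/to/other_lora.safetensors", adapter_name="pixel", hotswap=True)
        ```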
        enable_lora_hotswapNr+   )r   r   r   r   r;  )r   r   r   r   s       r;   r;  z!LoraBaseMixin.enable_lora_hotswap  sO     #oo335 	8NCy"78cTE`E`>`-	--77	8r=   c                     t        | t        j                  j                        r| j	                         n| }t        ||      S r   )r-   rm   r   r   rx   r   )layersr   layers_weightss      r;   pack_weightszLoraBaseMixin.pack_weights  s1    0:6588??0S**,Y_%nf==r=   rx   save_directoryis_main_processrs   save_functionsafe_serializationr&   c                    t         j                  j                  |      rt        j	                  d| d       yr|st        d      rt        t              st        d      ||rfd}nt        j                  }t        j                  |d       ||rt        }nt        }t        ||      j                         } || |       t        j!                  d	|        y)
zLWrites the state dict of the LoRA layers (optionally with metadata) to disk.zProvided path (z#) should be a directory, not a fileNzP`lora_adapter_metadata` cannot be specified when not using `safe_serialization`.z/`lora_adapter_metadata` must be of type `dict`.c                     ddi}rVj                         D ]$  \  }}t        |t              st        |      |<   & t	        j
                  dd      |t        <   t        j                  j                  | ||      S )Nformatptr   T)indent	sort_keys)rv   )
r   r-   r  r/   jsondumpsLORA_ADAPTER_METADATA_KEYrl   rm   	save_file)rG   filenamerv   r   r   r&   s        r;   rB  z6LoraBaseMixin.write_lora_layers.<locals>.save_function  s     ($/H,*?*E*E*G IJC)%5=A%[ 5c :I ?Cjj1!t?!:; ',,66wS[6\\r=   T)exist_okzModel weights saved in )r   r   r   r   errorr4   r-   rh   	TypeErrorrm   savemakedirsrk   rq   r   as_posixr   )rx   r@  rA  rs   rB  rC  r&   	save_paths         ` r;   write_lora_layerszLoraBaseMixin.write_lora_layers  s     77>>.)LL?>*::]^_ );opp 4I4)PMNN !] !&


NT2!3.5>>@	j),-i[9:r=   c                     t        |      S )N)r   )r   )r   r   s     r;   _optionally_disable_offloadingz,LoraBaseMixin._optionally_disable_offloading+  s    2YGGr=   r   )r   N)+r   
__module____qualname____doc__r   r  r   propertyfloatr6   r   r   r   classmethodr   r   r   r   rJ   boolr   r  r  r   r   r  r+  r-  r/  r3  r  rm   rg   intr9  r;  staticmethodr?  Tensorr   rh   rV  rX  r+   r=   r;   r   r     sa   +uIE I I * * % %O L L M M=2 !#!-1XMIXM XM 	XM
  S	*XMt 35 7-d3i 7-x RVekT#Y^,ek "%tT%[$t*(L"MNekN9<8<$CU49c>-B $CLT#Y B4T#Y#7 ,41T#Y 41ellTWY\F\@] 41bf 41l8$ > >  150;ell*+0;0; 0; 	0;
  0; !0;  (~0; 0;d H Hr=   r   )r)   FN)NNr   )r[   F)Nr)   r5   NNFFN)Jr  r0   rJ  r   pathlibr   typingr   r   r   r   r   rl   rm   torch.nnr   huggingface_hubr	   huggingface_hub.constantsr