from typing import Any, Optional

import torch

from peft.import_utils import is_gptqmodel_available
from peft.tuners.lora.layer import LoraLayer
from peft.tuners.tuners_utils import BaseTunerLayer
from peft.utils import get_auto_gptq_quant_linear

from .layer import LoraVariant


class GPTQLoraLinear(torch.nn.Module, LoraLayer):
    def __init__(
        self,
        base_layer,
        adapter_name: str,
        r: int = 0,
        lora_alpha: int = 1,
        lora_dropout: float = 0.0,
        init_lora_weights: bool = True,
        use_rslora: bool = False,
        use_dora: bool = False,
        use_qalora: bool = False,
        lora_bias: bool = False,
        qalora_group_size: int = 16,
        **kwargs,
    ):
        super().__init__()
        LoraLayer.__init__(self, base_layer)

        if use_dora:
            raise ValueError(f"{self.__class__.__name__} does not support DoRA yet, please set it to False")

        # keep a handle to the quantized linear module and register the LoRA weights on top of it
        self.quant_linear_module = base_layer
        self._active_adapter = adapter_name
        self.update_layer(
            adapter_name,
            r,
            lora_alpha=lora_alpha,
            lora_dropout=lora_dropout,
            init_lora_weights=init_lora_weights,
            use_rslora=use_rslora,
            use_dora=use_dora,
            use_qalora=use_qalora,
            lora_bias=lora_bias,
            qalora_group_size=qalora_group_size,
        )

    def resolve_lora_variant(self, *, use_dora: bool, use_qalora: bool, **kwargs) -> Optional[LoraVariant]:
        if use_dora and use_qalora:
            raise NotImplementedError(
                f"Dora and QA_lora at the same time is not supported for {self.__class__.__name__} (yet)."
            )

        if use_dora:
            from .variants import DoraLinearVariant

            variant = DoraLinearVariant()
        elif use_qalora:
            from .variants import QALoraLinearVariant

            variant = QALoraLinearVariant()
        else:
            variant = None
        return variant

    def forward(self, x: torch.Tensor):
        # run the quantized base layer first; LoRA deltas are added on top of its output
        result = self.quant_linear_module(x)

        if self.disable_adapters:
            return result

        lora_A_keys = self.lora_A.keys()
        for active_adapter in self.active_adapters:
            if active_adapter not in lora_A_keys:
                continue

            torch_result_dtype = result.dtype
            lora_A = self.lora_A[active_adapter]
            lora_B = self.lora_B[active_adapter]
            dropout = self.lora_dropout[active_adapter]
            scaling = self.scaling[active_adapter]

            x = self._cast_input_dtype(x, lora_A.weight.dtype)
            if active_adapter not in self.lora_variant:
                # vanilla LoRA path
                result = result + lora_B(lora_A(dropout(x))) * scaling
            else:
                # variant path (e.g. QALoRA)
                result = self.lora_variant[active_adapter].forward(
                    self,
                    active_adapter=active_adapter,
                    x=x,
                    result=result,
                )
            result = result.to(torch_result_dtype)
        return result

    def __repr__(self) -> str:
        rep = super().__repr__()
        return "lora." + rep


def dispatch_gptq(
    target: torch.nn.Module,
    adapter_name: str,
    **kwargs: Any,
) -> Optional[torch.nn.Module]:
    new_module = None

    if isinstance(target, BaseTunerLayer):
        target_base_layer = target.get_base_layer()
    else:
        target_base_layer = target

    cfg = kwargs.get("gptq_quantization_config", None)

    if is_gptqmodel_available():
        # prefer the gptqmodel backend when it is installed
        from gptqmodel.nn_modules.qlinear import BaseQuantLinear

        if isinstance(target_base_layer, BaseQuantLinear):
            new_module = GPTQLoraLinear(target, adapter_name, **kwargs)
            target.qweight = target_base_layer.qweight
    else:
        # fall back to the auto-gptq quant linear class derived from the quantization config
        quant_linear = get_auto_gptq_quant_linear(cfg)
        if quant_linear is not None and isinstance(target_base_layer, quant_linear):
            new_module = GPTQLoraLinear(target, adapter_name, **kwargs)
            target.qweight = target_base_layer.qweight

    return new_module