
import math
from dataclasses import dataclass
from typing import Optional, Tuple, Union

import torch

from ..configuration_utils import ConfigMixin, register_to_config
from ..utils import BaseOutput
from ..utils.torch_utils import randn_tensor
from .scheduling_utils import SchedulerMixin


def betas_for_alpha_bar(
    num_diffusion_timesteps,
    max_beta=0.999,
    alpha_transform_type="cosine",
):
    """
    Create a beta schedule that discretizes the given alpha_t_bar function, which defines the cumulative product of
    (1-beta) over time from t = [0,1].

    Contains a function alpha_bar that takes an argument t and transforms it to the cumulative product of (1-beta) up
    to that part of the diffusion process.


    Args:
        num_diffusion_timesteps (`int`): the number of betas to produce.
        max_beta (`float`): the maximum beta to use; use values lower than 1 to
                     prevent singularities.
        alpha_transform_type (`str`, *optional*, defaults to `cosine`): the type of noise schedule for alpha_bar.
                     Choose from `cosine` or `exp`.

    Returns:
        betas (`torch.Tensor`): the betas used by the scheduler to step the model outputs
    """
    if alpha_transform_type == "cosine":

        def alpha_bar_fn(t):
            return math.cos((t + 0.008) / 1.008 * math.pi / 2) ** 2

    elif alpha_transform_type == "exp":

        def alpha_bar_fn(t):
            return math.exp(t * -12.0)

    else:
        raise ValueError(f"Unsupported alpha_transform_type: {alpha_transform_type}")

    betas = []
    for i in range(num_diffusion_timesteps):
        t1 = i / num_diffusion_timesteps
        t2 = (i + 1) / num_diffusion_timesteps
        betas.append(min(1 - alpha_bar_fn(t2) / alpha_bar_fn(t1), max_beta))
    return torch.tensor(betas, dtype=torch.float32)


@dataclass
class ConsistencyDecoderSchedulerOutput(BaseOutput):
    """
    Output class for the scheduler's `step` function.

    Args:
        prev_sample (`torch.Tensor` of shape `(batch_size, num_channels, height, width)` for images):
            Computed sample `(x_{t-1})` of previous timestep. `prev_sample` should be used as next model input in the
            denoising loop.
    """

    prev_sample: torch.Tensor


class ConsistencyDecoderScheduler(SchedulerMixin, ConfigMixin):
    order = 1

    @register_to_config
    def __init__(
        self,
        num_train_timesteps: int = 1024,
        sigma_data: float = 0.5,
    ):
        betas = betas_for_alpha_bar(num_train_timesteps)

        alphas = 1.0 - betas
        alphas_cumprod = torch.cumprod(alphas, dim=0)

        self.sqrt_alphas_cumprod = torch.sqrt(alphas_cumprod)
        self.sqrt_one_minus_alphas_cumprod = torch.sqrt(1.0 - alphas_cumprod)

        sigmas = torch.sqrt(1.0 / alphas_cumprod - 1)

        sqrt_recip_alphas_cumprod = torch.sqrt(1.0 / alphas_cumprod)

        self.c_skip = sqrt_recip_alphas_cumprod * sigma_data**2 / (sigmas**2 + sigma_data**2)
        self.c_out = sigmas * sigma_data / (sigmas**2 + sigma_data**2) ** 0.5
        self.c_in = sqrt_recip_alphas_cumprod / (sigmas**2 + sigma_data**2) ** 0.5

    def set_timesteps(
        self,
        num_inference_steps: Optional[int] = None,
        device: Union[str, torch.device] = None,
    ):
        if num_inference_steps != 2:
            raise ValueError("Currently more than 2 inference steps are not supported.")

        self.timesteps = torch.tensor([1008, 512], dtype=torch.long, device=device)
        self.sqrt_alphas_cumprod = self.sqrt_alphas_cumprod.to(device)
        self.sqrt_one_minus_alphas_cumprod = self.sqrt_one_minus_alphas_cumprod.to(device)
        self.c_skip = self.c_skip.to(device)
        self.c_out = self.c_out.to(device)
        self.c_in = self.c_in.to(device)

    @property
    def init_noise_sigma(self):
        return self.sqrt_one_minus_alphas_cumprod[self.timesteps[0]]

    def scale_model_input(self, sample: torch.Tensor, timestep: Optional[int] = None) -> torch.Tensor:
        """
        Ensures interchangeability with schedulers that need to scale the denoising model input depending on the
        current timestep.

        Args:
            sample (`torch.Tensor`):
                The input sample.
            timestep (`int`, *optional*):
                The current timestep in the diffusion chain.

        Returns:
            `torch.Tensor`:
                A scaled input sample.
        """
        return sample * self.c_in[timestep]

    def step(
        self,
        model_output: torch.Tensor,
        timestep: Union[float, torch.Tensor],
        sample: torch.Tensor,
        generator: Optional[torch.Generator] = None,
        return_dict: bool = True,
    ) -> Union[ConsistencyDecoderSchedulerOutput, Tuple]:
        """
        Predict the sample from the previous timestep by reversing the SDE. This function propagates the diffusion
        process from the learned model outputs (most often the predicted noise).

        Args:
            model_output (`torch.Tensor`):
                The direct output from the learned diffusion model.
            timestep (`float`):
                The current timestep in the diffusion chain.
            sample (`torch.Tensor`):
                A current instance of a sample created by the diffusion process.
            generator (`torch.Generator`, *optional*):
                A random number generator.
            return_dict (`bool`, *optional*, defaults to `True`):
                Whether or not to return a
                [`~schedulers.scheduling_consistency_models.ConsistencyDecoderSchedulerOutput`] or `tuple`.

        Returns:
            [`~schedulers.scheduling_consistency_models.ConsistencyDecoderSchedulerOutput`] or `tuple`:
                If return_dict is `True`,
                [`~schedulers.scheduling_consistency_models.ConsistencyDecoderSchedulerOutput`] is returned, otherwise
                a tuple is returned where the first element is the sample tensor.
        """
        x_0 = self.c_out[timestep] * model_output + self.c_skip[timestep] * sample

        timestep_idx = torch.where(self.timesteps == timestep)[0]

        if timestep_idx == len(self.timesteps) - 1:
            prev_sample = x_0
        else:
            noise = randn_tensor(x_0.shape, generator=generator, dtype=x_0.dtype, device=x_0.device)
            prev_sample = (
                self.sqrt_alphas_cumprod[self.timesteps[timestep_idx + 1]].to(x_0.dtype) * x_0
                + self.sqrt_one_minus_alphas_cumprod[self.timesteps[timestep_idx + 1]].to(x_0.dtype) * noise
            )

        if not return_dict:
            return (prev_sample,)

        return ConsistencyDecoderSchedulerOutput(prev_sample=prev_sample)