
import math
from dataclasses import dataclass
from typing import List, Optional, Tuple, Union

import torch

from ..configuration_utils import ConfigMixin, register_to_config
from ..utils import BaseOutput
from ..utils.torch_utils import randn_tensor
from .scheduling_utils import SchedulerMixin


@dataclass
class DDPMWuerstchenSchedulerOutput(BaseOutput):
    """
    Output class for the scheduler's step function output.

    Args:
        prev_sample (`torch.Tensor` of shape `(batch_size, num_channels, height, width)` for images):
            Computed sample (x_{t-1}) of previous timestep. `prev_sample` should be used as next model input in the
            denoising loop.
    """

    prev_sample: torch.Tensor


def betas_for_alpha_bar(
    num_diffusion_timesteps,
    max_beta=0.999,
    alpha_transform_type="cosine",
):
    """
    Create a beta schedule that discretizes the given alpha_t_bar function, which defines the cumulative product of
    (1-beta) over time from t = [0,1].

    Contains a function alpha_bar that takes an argument t and transforms it to the cumulative product of (1-beta) up
    to that part of the diffusion process.


    Args:
        num_diffusion_timesteps (`int`): the number of betas to produce.
        max_beta (`float`): the maximum beta to use; use values lower than 1 to
                     prevent singularities.
        alpha_transform_type (`str`, *optional*, defaults to `cosine`): the type of noise schedule for alpha_bar.
                     Choose from `cosine` or `exp`

    Returns:
        betas (`np.ndarray`): the betas used by the scheduler to step the model outputs
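
    Examples:
        A minimal usage sketch (illustrative only; `DDPMWuerstchenScheduler` below does not call this helper, it
        evaluates its cosine schedule in closed form in `_alpha_cumprod`):

        ```py
        >>> betas = betas_for_alpha_bar(1000)  # cosine schedule by default
        >>> alphas_cumprod = torch.cumprod(1.0 - betas, dim=0)  # recovers alpha_bar at each discrete step
        ```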
    """
    if alpha_transform_type == "cosine":

        def alpha_bar_fn(t):
            return math.cos((t + 0.008) / 1.008 * math.pi / 2) ** 2

    elif alpha_transform_type == "exp":

        def alpha_bar_fn(t):
            return math.exp(t * -12.0)

    else:
        raise ValueError(f"Unsupported alpha_transform_type: {alpha_transform_type}")

    betas = []
    for i in range(num_diffusion_timesteps):
        t1 = i / num_diffusion_timesteps
        t2 = (i + 1) / num_diffusion_timesteps
        betas.append(min(1 - alpha_bar_fn(t2) / alpha_bar_fn(t1), max_beta))
    return torch.tensor(betas, dtype=torch.float32)


class DDPMWuerstchenScheduler(SchedulerMixin, ConfigMixin):
    """
    Denoising diffusion probabilistic models (DDPMs) explore the connections between denoising score matching and
    Langevin dynamics sampling.

    [`~ConfigMixin`] takes care of storing all config attributes that are passed in the scheduler's `__init__`
    function, such as `num_train_timesteps`. They can be accessed via `scheduler.config.num_train_timesteps`.
    [`SchedulerMixin`] provides general loading and saving functionality via the [`SchedulerMixin.save_pretrained`] and
    [`~SchedulerMixin.from_pretrained`] functions.

    For more details, see the original paper: https://huggingface.co/papers/2006.11239

    Args:
        scaler (`float`, defaults to 1.0):
            Exponent used to warp the timestep before it enters the cosine schedule; values above 1 use
            `1 - (1 - t) ** scaler`, values below 1 use `t ** scaler`, and `1.0` leaves `t` unchanged.
        s (`float`, defaults to 0.008):
            Small offset in the cosine schedule, `cos((t + s) / (1 + s) * pi / 2) ** 2`, that keeps the amount of
            noise added near `t = 0` from becoming vanishingly small.
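
    Examples:
        A minimal denoising-loop sketch. The denoiser call and the latent shape are placeholders (this scheduler
        is normally driven by the Wuerstchen pipelines), but the scheduler calls follow the API defined below:

        ```py
        >>> scheduler = DDPMWuerstchenScheduler()
        >>> scheduler.set_timesteps(num_inference_steps=12, device="cpu")
        >>> latents = torch.randn(1, 16, 24, 24) * scheduler.init_noise_sigma
        >>> for t in scheduler.timesteps[:-1]:
        ...     ratio = t.expand(latents.size(0))  # `step` expects a batched, continuous timestep in [0, 1]
        ...     noise_pred = denoiser(latents, ratio)  # placeholder for the learned denoising model
        ...     latents = scheduler.step(noise_pred, ratio, latents).prev_sample
        ```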
    scalersc                     || _         t        j                  |g      | _        t        j                  | j                  d| j                  z   z  t        j
                  z  dz        dz  | _        d| _        y )Nr         ?r         ?)r:   r   r.   r;   r!   r"   _init_alpha_cumprodinit_noise_sigma)selfr:   r;   s      r   __init__z DDPMWuerstchenScheduler.__init__i   s^     qc"#(99TVVq466z-BUXX-MPS-S#TXY#Y  !$r   c                    | j                   dkD  rdd|z
  | j                   z  z
  }n| j                   dk  r|| j                   z  }t        j                  || j                  j	                  |      z   d| j                  j	                  |      z   z  t        j
                  z  dz        dz  | j                  j	                  |      z  }|j                  dd      S )Nr   r=   r   g-C6?gH.?)r:   r   r!   r;   tor"   r?   clamp)rA   r$   devicealpha_cumprods       r   _alpha_cumprodz&DDPMWuerstchenScheduler._alpha_cumprodv   s    ;;?QUt{{**A[[1_4;;A		6""q46699V+<'<=H3N
)),,V45 ""6622r   Nsampletimestepreturnc                     |S )aP  
        Ensures interchangeability with schedulers that need to scale the denoising model input depending on the
        current timestep.

        Args:
            sample (`torch.Tensor`): input sample
            timestep (`int`, optional): current timestep

        Returns:
            `torch.Tensor`: scaled input sample
        """
        return sample

    def set_timesteps(
        self,
        num_inference_steps: int = None,
        timesteps: Optional[List[int]] = None,
        device: Union[str, torch.device] = None,
    ):
        """
        Sets the discrete timesteps used for the diffusion chain. Supporting function to be run before inference.

        Args:
            num_inference_steps (`int`):
                the number of diffusion steps used when generating samples with a pre-trained model. If passed, then
                `timesteps` must be `None`.
            device (`str` or `torch.device`, optional):
                the device to which the timesteps are moved.
        """
        if timesteps is None:
            timesteps = torch.linspace(1.0, 0.0, num_inference_steps + 1, device=device)
        if not isinstance(timesteps, torch.Tensor):
            timesteps = torch.Tensor(timesteps).to(device)
        self.timesteps = timesteps

    def step(
        self,
        model_output: torch.Tensor,
        timestep: int,
        sample: torch.Tensor,
        generator=None,
        return_dict: bool = True,
    ) -> Union[DDPMWuerstchenSchedulerOutput, Tuple]:
        """
        Predict the sample at the previous timestep by reversing the SDE. Core function to propagate the diffusion
        process from the learned model outputs (most often the predicted noise).

        Args:
            model_output (`torch.Tensor`): direct output from learned diffusion model.
            timestep (`int`): current discrete timestep in the diffusion chain.
            sample (`torch.Tensor`):
                current instance of sample being created by diffusion process.
            generator: random number generator.
            return_dict (`bool`): option for returning tuple rather than DDPMWuerstchenSchedulerOutput class

        Returns:
            [`DDPMWuerstchenSchedulerOutput`] or `tuple`: [`DDPMWuerstchenSchedulerOutput`] if `return_dict` is True,
            otherwise a `tuple`. When returning a tuple, the first element is the sample tensor.

        """
        dtype = model_output.dtype
        device = model_output.device
        t = timestep

        prev_t = self.previous_timestep(t)

        alpha_cumprod = self._alpha_cumprod(t, device).view(t.size(0), *[1 for _ in sample.shape[1:]])
        alpha_cumprod_prev = self._alpha_cumprod(prev_t, device).view(prev_t.size(0), *[1 for _ in sample.shape[1:]])
        alpha = alpha_cumprod / alpha_cumprod_prev

        mu = (1.0 / alpha).sqrt() * (sample - (1 - alpha) * model_output / (1 - alpha_cumprod).sqrt())

        std_noise = randn_tensor(mu.shape, generator=generator, device=model_output.device, dtype=model_output.dtype)
        std = ((1 - alpha) * (1.0 - alpha_cumprod_prev) / (1.0 - alpha_cumprod)).sqrt() * std_noise
        pred = mu + std * (prev_t != 0).float().view(prev_t.size(0), *[1 for _ in sample.shape[1:]])

        if not return_dict:
            return (pred.to(dtype),)

        return DDPMWuerstchenSchedulerOutput(prev_sample=pred.to(dtype))

    def add_noise(
        self,
        original_samples: torch.Tensor,
        noise: torch.Tensor,
        timesteps: torch.Tensor,
    ) -> torch.Tensor:
        device = original_samples.device
        dtype = original_samples.dtype
        alpha_cumprod = self._alpha_cumprod(timesteps, device=device).view(
            timesteps.size(0), *[1 for _ in original_samples.shape[1:]]
        )
        noisy_samples = alpha_cumprod.sqrt() * original_samples + (1 - alpha_cumprod).sqrt() * noise
        return noisy_samples.to(dtype=dtype)

    def __len__(self):
        return self.config.num_train_timesteps

    def previous_timestep(self, timestep):
        index = (self.timesteps - timestep[0]).abs().argmin().item()
        prev_t = self.timesteps[index + 1][None].expand(timestep.shape[0])
        return prev_t