
    biI                         d dl mZ d dlmZmZmZmZ d dlZd dl	Z	ddl
mZmZ ddlmZmZ ddlmZ dd	lmZ  ej(                  e      Ze G d
 de             Z G d dee      Zy)    )	dataclass)ListOptionalTupleUnionN   )ConfigMixinregister_to_config)
BaseOutputlogging)randn_tensor   )SchedulerMixinc                   0    e Zd ZU dZej
                  ed<   y)$CMStochasticIterativeSchedulerOutputa>  
    Output class for the scheduler's `step` function.

    Args:
        prev_sample (`torch.Tensor` of shape `(batch_size, num_channels, height, width)` for images):
            Computed sample `(x_{t-1})` of previous timestep. `prev_sample` should be used as next model input in the
            denoising loop.
    """

    prev_sample: torch.Tensor


class CMStochasticIterativeScheduler(SchedulerMixin, ConfigMixin):
    """
    Multistep and onestep sampling for consistency models.

    This model inherits from [`SchedulerMixin`] and [`ConfigMixin`]. Check the superclass documentation for the generic
    methods the library implements for all schedulers such as loading and saving.

    Args:
        num_train_timesteps (`int`, defaults to 40):
            The number of diffusion steps to train the model.
        sigma_min (`float`, defaults to 0.002):
            Minimum noise magnitude in the sigma schedule. Defaults to 0.002 from the original implementation.
        sigma_max (`float`, defaults to 80.0):
            Maximum noise magnitude in the sigma schedule. Defaults to 80.0 from the original implementation.
        sigma_data (`float`, defaults to 0.5):
            The standard deviation of the data distribution from the EDM
            [paper](https://huggingface.co/papers/2206.00364). Defaults to 0.5 from the original implementation.
        s_noise (`float`, defaults to 1.0):
            The amount of additional noise to counteract loss of detail during sampling. A reasonable range is [1.000,
            1.011]. Defaults to 1.0 from the original implementation.
        rho (`float`, defaults to 7.0):
            The parameter for calculating the Karras sigma schedule from the EDM
            [paper](https://huggingface.co/papers/2206.00364). Defaults to 7.0 from the original implementation.
        clip_denoised (`bool`, defaults to `True`):
            Whether to clip the denoised outputs to `(-1, 1)`.
        timesteps (`List` or `np.ndarray` or `torch.Tensor`, *optional*):
            An explicit timestep schedule that can be optionally specified. The timesteps are expected to be in
            increasing order.
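
    Examples:
        A minimal sketch of the multistep sampling loop this scheduler drives. The zero-valued `model_output`
        is only a stand-in for a real consistency model forward pass, and the tensor shape and step count are
        arbitrary assumptions for illustration:

        ```py
        >>> import torch
        >>> from diffusers import CMStochasticIterativeScheduler

        >>> scheduler = CMStochasticIterativeScheduler()
        >>> scheduler.set_timesteps(num_inference_steps=2)

        >>> sample = torch.randn(1, 3, 64, 64) * scheduler.init_noise_sigma
        >>> for t in scheduler.timesteps:
        ...     scaled = scheduler.scale_model_input(sample, t)
        ...     model_output = torch.zeros_like(scaled)  # placeholder for the consistency model call
        ...     sample = scheduler.step(model_output, t, sample).prev_sample
        ```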
    """

    order = 1

    @register_to_config
    def __init__(
        self,
        num_train_timesteps: int = 40,
        sigma_min: float = 0.002,
        sigma_max: float = 80.0,
        sigma_data: float = 0.5,
        s_noise: float = 1.0,
        rho: float = 7.0,
        clip_denoised: bool = True,
    ):
        # standard deviation of the initial noise distribution
        self.init_noise_sigma = sigma_max

        ramp = np.linspace(0, 1, num_train_timesteps)
        sigmas = self._convert_to_karras(ramp)
        timesteps = self.sigma_to_t(sigmas)

        # setable values
        self.num_inference_steps = None
        self.sigmas = torch.from_numpy(sigmas)
        self.timesteps = torch.from_numpy(timesteps)
        self.custom_timesteps = False
        self.is_scale_input_called = False
        self._step_index = None
        self._begin_index = None
        self.sigmas = self.sigmas.to("cpu")  # to avoid too much CPU/GPU communication

    @property
    def step_index(self):
        """
        The index counter for current timestep. It will increase 1 after each scheduler step.
        """
        return self._step_index

    @property
    def begin_index(self):
        """
        The index for the first timestep. It should be set from pipeline with `set_begin_index` method.
        """
        return self._begin_index

    # Copied from diffusers.schedulers.scheduling_dpmsolver_multistep.DPMSolverMultistepScheduler.set_begin_index
    def set_begin_index(self, begin_index: int = 0):
        """
        Sets the begin index for the scheduler. This function should be run from pipeline before the inference.

        Args:
            begin_index (`int`):
                The begin index for the scheduler.
        """
        self._begin_index = begin_index

    def scale_model_input(self, sample: torch.Tensor, timestep: Union[float, torch.Tensor]) -> torch.Tensor:
        """
        Scales the consistency model input by `(sigma**2 + sigma_data**2) ** 0.5`.

        Args:
            sample (`torch.Tensor`):
                The input sample.
            timestep (`float` or `torch.Tensor`):
                The current timestep in the diffusion chain.

        Returns:
            `torch.Tensor`:
                A scaled input sample.
        """
        # Get sigma corresponding to timestep
        if self.step_index is None:
            self._init_step_index(timestep)

        sigma = self.sigmas[self.step_index]

        sample = sample / ((sigma**2 + self.config.sigma_data**2) ** 0.5)

        self.is_scale_input_called = True
        return sample

    def sigma_to_t(self, sigmas: Union[float, np.ndarray]):
        """
        Gets scaled timesteps from the Karras sigmas for input to the consistency model.

        Args:
            sigmas (`float` or `np.ndarray`):
                A single Karras sigma or an array of Karras sigmas.

        Returns:
            `float` or `np.ndarray`:
                A scaled input timestep or scaled input timestep array.
        """
        if not isinstance(sigmas, np.ndarray):
            sigmas = np.array(sigmas, dtype=np.float64)

        timesteps = 1000 * 0.25 * np.log(sigmas + 1e-44)

        return timesteps

    def set_timesteps(
        self,
        num_inference_steps: Optional[int] = None,
        device: Union[str, torch.device] = None,
        timesteps: Optional[List[int]] = None,
    ):
        """
        Sets the timesteps used for the diffusion chain (to be run before inference).

        Args:
            num_inference_steps (`int`):
                The number of diffusion steps used when generating samples with a pre-trained model.
            device (`str` or `torch.device`, *optional*):
                The device to which the timesteps should be moved to. If `None`, the timesteps are not moved.
            timesteps (`List[int]`, *optional*):
                Custom timesteps used to support arbitrary spacing between timesteps. If `None`, then the default
                timestep spacing strategy of equal spacing between timesteps is used. If `timesteps` is passed,
                `num_inference_steps` must be `None`.
        """
        if num_inference_steps is None and timesteps is None:
            raise ValueError("Exactly one of `num_inference_steps` or `timesteps` must be supplied.")

        if num_inference_steps is not None and timesteps is not None:
            raise ValueError("Can only pass one of `num_inference_steps` or `timesteps`.")

        # Follow DDPMScheduler custom timesteps logic
        if timesteps is not None:
            for i in range(1, len(timesteps)):
                if timesteps[i] >= timesteps[i - 1]:
                    raise ValueError("`timesteps` must be in descending order.")

            if timesteps[0] >= self.config.num_train_timesteps:
                raise ValueError(
                    f"`timesteps` must start before `self.config.train_timesteps`:"
                    f" {self.config.num_train_timesteps}."
                )

            timesteps = np.array(timesteps, dtype=np.int64)
            self.custom_timesteps = True
        else:
            if num_inference_steps > self.config.num_train_timesteps:
                raise ValueError(
                    f"`num_inference_steps`: {num_inference_steps} cannot be larger than `self.config.train_timesteps`:"
                    f" {self.config.num_train_timesteps} as the unet model trained with this scheduler can only handle"
                    f" maximal {self.config.num_train_timesteps} timesteps."
                )

            self.num_inference_steps = num_inference_steps

            step_ratio = self.config.num_train_timesteps // self.num_inference_steps
            timesteps = (np.arange(0, num_inference_steps) * step_ratio).round()[::-1].copy().astype(np.int64)
            self.custom_timesteps = False

        # Map timesteps to Karras sigmas directly for multistep sampling
        num_train_timesteps = self.config.num_train_timesteps
        ramp = timesteps[::-1].copy()
        ramp = ramp / (num_train_timesteps - 1)
        sigmas = self._convert_to_karras(ramp)
        timesteps = self.sigma_to_t(sigmas)

        sigmas = np.concatenate([sigmas, [self.config.sigma_min]]).astype(np.float32)
        self.sigmas = torch.from_numpy(sigmas).to(device=device)

        if str(device).startswith("mps"):
            # mps does not support float64
            self.timesteps = torch.from_numpy(timesteps).to(device, dtype=torch.float32)
        else:
            self.timesteps = torch.from_numpy(timesteps).to(device=device)

        self._step_index = None
        self._begin_index = None
        self.sigmas = self.sigmas.to("cpu")  # to avoid too much CPU/GPU communication

    # Modified _convert_to_karras implementation that takes in ramp as argument
    def _convert_to_karras(self, ramp):
        """Constructs the noise schedule of Karras et al. (2022)."""

        sigma_min: float = self.config.sigma_min
        sigma_max: float = self.config.sigma_max

        rho = self.config.rho
        min_inv_rho = sigma_min ** (1 / rho)
        max_inv_rho = sigma_max ** (1 / rho)
        sigmas = (max_inv_rho + ramp * (min_inv_rho - max_inv_rho)) ** rho
        return sigmas

    def get_scalings(self, sigma):
        sigma_data = self.config.sigma_data

        c_skip = sigma_data**2 / (sigma**2 + sigma_data**2)
        c_out = sigma * sigma_data / (sigma**2 + sigma_data**2) ** 0.5
        return c_skip, c_out

    def get_scalings_for_boundary_condition(self, sigma):
        """
        Gets the scalings used in the consistency model parameterization (from Appendix C of the
        [paper](https://huggingface.co/papers/2303.01469)) to enforce boundary condition.

        <Tip>

        `epsilon` in the equations for `c_skip` and `c_out` is set to `sigma_min`.

        </Tip>

        Args:
            sigma (`torch.Tensor`):
                The current sigma in the Karras sigma schedule.

        Returns:
            `tuple`:
                A two-element tuple where `c_skip` (which weights the current sample) is the first element and `c_out`
                (which weights the consistency model output) is the second element.
        """
        sigma_min = self.config.sigma_min
        sigma_data = self.config.sigma_data

        c_skip = sigma_data**2 / ((sigma - sigma_min) ** 2 + sigma_data**2)
        c_out = (sigma - sigma_min) * sigma_data / (sigma**2 + sigma_data**2) ** 0.5
        return c_skip, c_out

    # Copied from diffusers.schedulers.scheduling_euler_discrete.EulerDiscreteScheduler.index_for_timestep
    def index_for_timestep(self, timestep, schedule_timesteps=None):
        if schedule_timesteps is None:
            schedule_timesteps = self.timesteps

        indices = (schedule_timesteps == timestep).nonzero()

        # The sigma index that is taken for the **very** first `step`
        # is always the second index (or the last index if there is only 1)
        # This way we can ensure we don't accidentally skip a sigma in
        # case we start in the middle of the denoising schedule (e.g. for image-to-image)
        pos = 1 if len(indices) > 1 else 0

        return indices[pos].item()

    # Copied from diffusers.schedulers.scheduling_euler_discrete.EulerDiscreteScheduler._init_step_index
    def _init_step_index(self, timestep):
        if self.begin_index is None:
            if isinstance(timestep, torch.Tensor):
                timestep = timestep.to(self.timesteps.device)
            self._step_index = self.index_for_timestep(timestep)
        else:
            self._step_index = self._begin_index

    def step(
        self,
        model_output: torch.Tensor,
        timestep: Union[float, torch.Tensor],
        sample: torch.Tensor,
        generator: Optional[torch.Generator] = None,
        return_dict: bool = True,
    ) -> Union[CMStochasticIterativeSchedulerOutput, Tuple]:
        """
        Predict the sample from the previous timestep by reversing the SDE. This function propagates the diffusion
        process from the learned model outputs (most often the predicted noise).

        Args:
            model_output (`torch.Tensor`):
                The direct output from the learned diffusion model.
            timestep (`float`):
                The current timestep in the diffusion chain.
            sample (`torch.Tensor`):
                A current instance of a sample created by the diffusion process.
            generator (`torch.Generator`, *optional*):
                A random number generator.
            return_dict (`bool`, *optional*, defaults to `True`):
                Whether or not to return a
                [`~schedulers.scheduling_consistency_models.CMStochasticIterativeSchedulerOutput`] or `tuple`.

        Returns:
            [`~schedulers.scheduling_consistency_models.CMStochasticIterativeSchedulerOutput`] or `tuple`:
                If return_dict is `True`,
                [`~schedulers.scheduling_consistency_models.CMStochasticIterativeSchedulerOutput`] is returned,
                otherwise a tuple is returned where the first element is the sample tensor.
        """
        if isinstance(timestep, (int, torch.IntTensor, torch.LongTensor)):
            raise ValueError(
                (
                    "Passing integer indices (e.g. from `enumerate(timesteps)`) as timesteps to"
                    f" `{self.__class__}.step()` is not supported. Make sure to pass"
                    " one of the `scheduler.timesteps` as a timestep."
                ),
            )

        if not self.is_scale_input_called:
            logger.warning(
                "The `scale_model_input` function should be called before `step` to ensure correct denoising. "
                "See `StableDiffusionPipeline` for a usage example."
            )

        sigma_min = self.config.sigma_min
        sigma_max = self.config.sigma_max

        if self.step_index is None:
            self._init_step_index(timestep)

        # sigma_next corresponds to next_t, while sigma corresponds to current_t
        sigma = self.sigmas[self.step_index]
        if self.step_index + 1 < self.config.num_train_timesteps:
            sigma_next = self.sigmas[self.step_index + 1]
        else:
            # Set sigma_next to sigma_min
            sigma_next = self.sigmas[-1]

        # Get scalings for boundary conditions
        c_skip, c_out = self.get_scalings_for_boundary_condition(sigma)

        # 1. Denoise model output using boundary conditions
        denoised = c_out * model_output + c_skip * sample
        if self.config.clip_denoised:
            denoised = denoised.clamp(-1, 1)

        # 2. Sample z ~ N(0, s_noise^2 * I)
        # Noise is not used for onestep sampling.
        if len(self.timesteps) > 1:
            noise = randn_tensor(
                model_output.shape, dtype=model_output.dtype, device=model_output.device, generator=generator
            )
        else:
            noise = torch.zeros_like(model_output)
        z = noise * self.config.s_noise

        sigma_hat = sigma_next.clamp(min=sigma_min, max=sigma_max)

        # 3. Return noisy sample
        # tau = sigma_hat, eps = sigma_min
        prev_sample = denoised + z * (sigma_hat**2 - sigma_min**2) ** 0.5

        # upon completion increase step index by one
        self._step_index += 1

        if not return_dict:
            return (prev_sample,)

        return CMStochasticIterativeSchedulerOutput(prev_sample=prev_sample)

    # Copied from diffusers.schedulers.scheduling_euler_discrete.EulerDiscreteScheduler.add_noise
    def add_noise(
        self,
        original_samples: torch.Tensor,
        noise: torch.Tensor,
        timesteps: torch.Tensor,
    ) -> torch.Tensor:
        # Make sure sigmas and timesteps have the same device and dtype as original_samples
        sigmas = self.sigmas.to(device=original_samples.device, dtype=original_samples.dtype)
        if original_samples.device.type == "mps" and torch.is_floating_point(timesteps):
            # mps does not support float64
            schedule_timesteps = self.timesteps.to(original_samples.device, dtype=torch.float32)
            timesteps = timesteps.to(original_samples.device, dtype=torch.float32)
        else:
            schedule_timesteps = self.timesteps.to(original_samples.device)
            timesteps = timesteps.to(original_samples.device)

        # self.begin_index is None when scheduler is used for training, or pipeline does not implement set_begin_index
        if self.begin_index is None:
            step_indices = [self.index_for_timestep(t, schedule_timesteps) for t in timesteps]
        elif self.step_index is not None:
            # add_noise is called after first denoising step (for inpainting)
            step_indices = [self.step_index] * timesteps.shape[0]
        else:
            # add noise is called before first denoising step to create initial latent (img2img)
            step_indices = [self.begin_index] * timesteps.shape[0]

        sigma = sigmas[step_indices].flatten()
        while len(sigma.shape) < len(original_samples.shape):
            sigma = sigma.unsqueeze(-1)

        noisy_samples = original_samples + noise * sigma
        return noisy_samples

    def __len__(self):
        return self.config.num_train_timesteps