
import math
from typing import List, Optional, Tuple, Union

import numpy as np
import torch

from ..configuration_utils import ConfigMixin, register_to_config
from ..utils import deprecate, is_scipy_available
from .scheduling_utils import KarrasDiffusionSchedulers, SchedulerMixin, SchedulerOutput


if is_scipy_available():
    import scipy.stats


def betas_for_alpha_bar(
    num_diffusion_timesteps,
    max_beta=0.999,
    alpha_transform_type="cosine",
):
    """
    Create a beta schedule that discretizes the given alpha_t_bar function, which defines the cumulative product of
    (1-beta) over time from t = [0,1].

    Contains a function alpha_bar that takes an argument t and transforms it to the cumulative product of (1-beta) up
    to that part of the diffusion process.


    Args:
        num_diffusion_timesteps (`int`): the number of betas to produce.
        max_beta (`float`): the maximum beta to use; use values lower than 1 to
                     prevent singularities.
        alpha_transform_type (`str`, *optional*, defaults to `cosine`): the type of noise schedule for alpha_bar.
                     Choose from `cosine` or `exp`

    Returns:
        betas (`np.ndarray`): the betas used by the scheduler to step the model outputs
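
    Example (a minimal sanity check; the printed values assume the default `cosine` transform):

    ```py
    >>> betas = betas_for_alpha_bar(10)
    >>> betas.shape
    torch.Size([10])
    >>> bool(betas[0] < betas[-1])  # the betas grow toward the end of the schedule
    True
    ```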
    """
    if alpha_transform_type == "cosine":

        def alpha_bar_fn(t):
            return math.cos((t + 0.008) / 1.008 * math.pi / 2) ** 2

    elif alpha_transform_type == "exp":

        def alpha_bar_fn(t):
            return math.exp(t * -12.0)

    else:
        raise ValueError(f"Unsupported alpha_transform_type: {alpha_transform_type}")

    betas = []
    for i in range(num_diffusion_timesteps):
        t1 = i / num_diffusion_timesteps
        t2 = (i + 1) / num_diffusion_timesteps
        betas.append(min(1 - alpha_bar_fn(t2) / alpha_bar_fn(t1), max_beta))
    return torch.tensor(betas, dtype=torch.float32)


class DEISMultistepScheduler(SchedulerMixin, ConfigMixin):
    """
    `DEISMultistepScheduler` is a fast high order solver for diffusion ordinary differential equations (ODEs).

    This model inherits from [`SchedulerMixin`] and [`ConfigMixin`]. Check the superclass documentation for the generic
    methods the library implements for all schedulers such as loading and saving.
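
    A minimal usage sketch (the checkpoint id and prompt are illustrative; any pipeline whose scheduler config is
    compatible can be used the same way):

    ```py
    >>> from diffusers import DiffusionPipeline, DEISMultistepScheduler

    >>> pipe = DiffusionPipeline.from_pretrained("stable-diffusion-v1-5/stable-diffusion-v1-5")
    >>> pipe.scheduler = DEISMultistepScheduler.from_config(pipe.scheduler.config)
    >>> image = pipe("an astronaut riding a horse on mars", num_inference_steps=25).images[0]
    ```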

    Args:
        num_train_timesteps (`int`, defaults to 1000):
            The number of diffusion steps to train the model.
        beta_start (`float`, defaults to 0.0001):
            The starting `beta` value of inference.
        beta_end (`float`, defaults to 0.02):
            The final `beta` value.
        beta_schedule (`str`, defaults to `"linear"`):
            The beta schedule, a mapping from a beta range to a sequence of betas for stepping the model. Choose from
            `linear`, `scaled_linear`, or `squaredcos_cap_v2`.
        trained_betas (`np.ndarray`, *optional*):
            Pass an array of betas directly to the constructor to bypass `beta_start` and `beta_end`.
        solver_order (`int`, defaults to 2):
            The DEIS order which can be `1` or `2` or `3`. It is recommended to use `solver_order=2` for guided
            sampling, and `solver_order=3` for unconditional sampling.
        prediction_type (`str`, defaults to `epsilon`):
            Prediction type of the scheduler function; can be `epsilon` (predicts the noise of the diffusion process),
            `sample` (directly predicts the noisy sample) or `v_prediction` (see section 2.4 of [Imagen
            Video](https://imagen.research.google/video/paper.pdf) paper).
        thresholding (`bool`, defaults to `False`):
            Whether to use the "dynamic thresholding" method. This is unsuitable for latent-space diffusion models such
            as Stable Diffusion.
        dynamic_thresholding_ratio (`float`, defaults to 0.995):
            The ratio for the dynamic thresholding method. Valid only when `thresholding=True`.
        sample_max_value (`float`, defaults to 1.0):
            The threshold value for dynamic thresholding. Valid only when `thresholding=True`.
        algorithm_type (`str`, defaults to `deis`):
            The algorithm type for the solver.
        lower_order_final (`bool`, defaults to `True`):
            Whether to use lower-order solvers in the final steps. Only valid for < 15 inference steps.
        use_karras_sigmas (`bool`, *optional*, defaults to `False`):
             Whether to use Karras sigmas for step sizes in the noise schedule during the sampling process. If `True`,
             the sigmas are determined according to a sequence of noise levels {σi}.
        use_exponential_sigmas (`bool`, *optional*, defaults to `False`):
            Whether to use exponential sigmas for step sizes in the noise schedule during the sampling process.
        use_beta_sigmas (`bool`, *optional*, defaults to `False`):
            Whether to use beta sigmas for step sizes in the noise schedule during the sampling process. Refer to [Beta
            Sampling is All You Need](https://huggingface.co/papers/2407.12173) for more information.
        timestep_spacing (`str`, defaults to `"linspace"`):
            The way the timesteps should be scaled. Refer to Table 2 of the [Common Diffusion Noise Schedules and
            Sample Steps are Flawed](https://huggingface.co/papers/2305.08891) for more information.
        steps_offset (`int`, defaults to 0):
            An offset added to the inference steps, as required by some model families.
    """

    _compatibles = [e.name for e in KarrasDiffusionSchedulers]
    order = 1

    @register_to_config
    def __init__(
        self,
        num_train_timesteps: int = 1000,
        beta_start: float = 0.0001,
        beta_end: float = 0.02,
        beta_schedule: str = "linear",
        trained_betas: Optional[np.ndarray] = None,
        solver_order: int = 2,
        prediction_type: str = "epsilon",
        thresholding: bool = False,
        dynamic_thresholding_ratio: float = 0.995,
        sample_max_value: float = 1.0,
        algorithm_type: str = "deis",
        solver_type: str = "logrho",
        lower_order_final: bool = True,
        use_karras_sigmas: Optional[bool] = False,
        use_exponential_sigmas: Optional[bool] = False,
        use_beta_sigmas: Optional[bool] = False,
        use_flow_sigmas: Optional[bool] = False,
        flow_shift: Optional[float] = 1.0,
        timestep_spacing: str = "linspace",
        steps_offset: int = 0,
        use_dynamic_shifting: bool = False,
        time_shift_type: str = "exponential",
    ):
        if self.config.use_beta_sigmas and not is_scipy_available():
            raise ImportError("Make sure to install scipy if you want to use beta sigmas.")
        if sum([self.config.use_beta_sigmas, self.config.use_exponential_sigmas, self.config.use_karras_sigmas]) > 1:
            raise ValueError(
                "Only one of `config.use_beta_sigmas`, `config.use_exponential_sigmas`, `config.use_karras_sigmas` can be used."
            )
        if trained_betas is not None:
            self.betas = torch.tensor(trained_betas, dtype=torch.float32)
        elif beta_schedule == "linear":
            self.betas = torch.linspace(beta_start, beta_end, num_train_timesteps, dtype=torch.float32)
        elif beta_schedule == "scaled_linear":
            # this schedule is very specific to the latent diffusion model
            self.betas = torch.linspace(beta_start**0.5, beta_end**0.5, num_train_timesteps, dtype=torch.float32) ** 2
        elif beta_schedule == "squaredcos_cap_v2":
            # Glide cosine schedule
            self.betas = betas_for_alpha_bar(num_train_timesteps)
        else:
            raise NotImplementedError(f"{beta_schedule} is not implemented for {self.__class__}")

        self.alphas = 1.0 - self.betas
        self.alphas_cumprod = torch.cumprod(self.alphas, dim=0)
        self.alpha_t = torch.sqrt(self.alphas_cumprod)
        self.sigma_t = torch.sqrt(1 - self.alphas_cumprod)
        self.lambda_t = torch.log(self.alpha_t) - torch.log(self.sigma_t)
        self.sigmas = ((1 - self.alphas_cumprod) / self.alphas_cumprod) ** 0.5

        # standard deviation of the initial noise distribution
        self.init_noise_sigma = 1.0

        # DEIS settings: DPM-Solver style names are accepted but mapped onto the DEIS equivalents
        if algorithm_type not in ["deis"]:
            if algorithm_type in ["dpmsolver", "dpmsolver++"]:
                self.register_to_config(algorithm_type="deis")
            else:
                raise NotImplementedError(f"{algorithm_type} is not implemented for {self.__class__}")

        if solver_type not in ["logrho"]:
            if solver_type in ["midpoint", "heun", "bh1", "bh2"]:
                self.register_to_config(solver_type="logrho")
            else:
                raise NotImplementedError(f"solver type {solver_type} is not implemented for {self.__class__}")

        # setable values
        self.num_inference_steps = None
        timesteps = np.linspace(0, num_train_timesteps - 1, num_train_timesteps, dtype=np.float32)[::-1].copy()
        self.timesteps = torch.from_numpy(timesteps)
        self.model_outputs = [None] * solver_order
        self.lower_order_nums = 0
        self._step_index = None
        self._begin_index = None
        self.sigmas = self.sigmas.to("cpu")  # to avoid too much CPU/GPU communication

    @property
    def step_index(self):
        """
        The index counter for current timestep. It will increase 1 after each scheduler step.
        """
        return self._step_index

    @property
    def begin_index(self):
        """
        The index for the first timestep. It should be set from pipeline with `set_begin_index` method.
        """
        return self._begin_index

    def set_begin_index(self, begin_index: int = 0):
        """
        Sets the begin index for the scheduler. This function should be run from the pipeline before inference.

        Args:
            begin_index (`int`):
                The begin index for the scheduler.
        """
        self._begin_index = begin_index

    def set_timesteps(
        self, num_inference_steps: int, device: Union[str, torch.device] = None, mu: Optional[float] = None
    ):
        """
        Sets the discrete timesteps used for the diffusion chain (to be run before inference).

        Args:
            num_inference_steps (`int`):
                The number of diffusion steps used when generating samples with a pre-trained model.
            device (`str` or `torch.device`, *optional*):
                The device to which the timesteps should be moved. If `None`, the timesteps are not moved.
        """
        if mu is not None:
            assert self.config.use_dynamic_shifting and self.config.time_shift_type == "exponential"
            self.config.flow_shift = np.exp(mu)
        # "linspace", "leading", "trailing" correspond to the annotations in Table 2 of
        # https://huggingface.co/papers/2305.08891
        if self.config.timestep_spacing == "linspace":
            timesteps = (
                np.linspace(0, self.config.num_train_timesteps - 1, num_inference_steps + 1)
                .round()[::-1][:-1]
                .copy()
                .astype(np.int64)
            )
        elif self.config.timestep_spacing == "leading":
            step_ratio = self.config.num_train_timesteps // (num_inference_steps + 1)
            # creates integer timesteps by multiplying by ratio
            # casting to int to avoid issues when num_inference_steps is a power of 3
            timesteps = (np.arange(0, num_inference_steps + 1) * step_ratio).round()[::-1][:-1].copy().astype(np.int64)
            timesteps += self.config.steps_offset
        elif self.config.timestep_spacing == "trailing":
            step_ratio = self.config.num_train_timesteps / num_inference_steps
            # creates integer timesteps by multiplying by ratio
            # casting to int to avoid issues when num_inference_steps is a power of 3
            timesteps = np.arange(self.config.num_train_timesteps, 0, -step_ratio).round().copy().astype(np.int64)
            timesteps -= 1
        else:
            raise ValueError(
                f"{self.config.timestep_spacing} is not supported. Please make sure to choose one of 'linspace', "
                "'leading' or 'trailing'."
            )

        sigmas = np.array(((1 - self.alphas_cumprod) / self.alphas_cumprod) ** 0.5)
        log_sigmas = np.log(sigmas)
        if self.config.use_karras_sigmas:
            sigmas = np.flip(sigmas).copy()
            sigmas = self._convert_to_karras(in_sigmas=sigmas, num_inference_steps=num_inference_steps)
            timesteps = np.array([self._sigma_to_t(sigma, log_sigmas) for sigma in sigmas]).round()
            sigmas = np.concatenate([sigmas, sigmas[-1:]]).astype(np.float32)
        elif self.config.use_exponential_sigmas:
            sigmas = np.flip(sigmas).copy()
            sigmas = self._convert_to_exponential(in_sigmas=sigmas, num_inference_steps=num_inference_steps)
            timesteps = np.array([self._sigma_to_t(sigma, log_sigmas) for sigma in sigmas])
            sigmas = np.concatenate([sigmas, sigmas[-1:]]).astype(np.float32)
        elif self.config.use_beta_sigmas:
            sigmas = np.flip(sigmas).copy()
            sigmas = self._convert_to_beta(in_sigmas=sigmas, num_inference_steps=num_inference_steps)
            timesteps = np.array([self._sigma_to_t(sigma, log_sigmas) for sigma in sigmas])
            sigmas = np.concatenate([sigmas, sigmas[-1:]]).astype(np.float32)
        elif self.config.use_flow_sigmas:
            alphas = np.linspace(1, 1 / self.config.num_train_timesteps, num_inference_steps + 1)
            sigmas = 1.0 - alphas
            sigmas = np.flip(self.config.flow_shift * sigmas / (1 + (self.config.flow_shift - 1) * sigmas))[:-1].copy()
            timesteps = (sigmas * self.config.num_train_timesteps).copy()
            sigmas = np.concatenate([sigmas, sigmas[-1:]]).astype(np.float32)
        else:
            sigmas = np.interp(timesteps, np.arange(0, len(sigmas)), sigmas)
            sigma_last = ((1 - self.alphas_cumprod[0]) / self.alphas_cumprod[0]) ** 0.5
            sigmas = np.concatenate([sigmas, [sigma_last]]).astype(np.float32)

        self.sigmas = torch.from_numpy(sigmas)
        self.timesteps = torch.from_numpy(timesteps).to(device=device, dtype=torch.int64)

        self.num_inference_steps = len(timesteps)

        self.model_outputs = [None] * self.config.solver_order
        self.lower_order_nums = 0

        # add an index counter for schedulers that allow duplicated timesteps
        self._step_index = None
        self._begin_index = None
        self.sigmas = self.sigmas.to("cpu")  # to avoid too much CPU/GPU communication

    def _threshold_sample(self, sample: torch.Tensor) -> torch.Tensor:
        """
        "Dynamic thresholding: At each sampling step we set s to a certain percentile absolute pixel value in xt0 (the
        prediction of x_0 at timestep t), and if s > 1, then we threshold xt0 to the range [-s, s] and then divide by
        s. Dynamic thresholding pushes saturated pixels (those near -1 and 1) inwards, thereby actively preventing
        pixels from saturation at each step. We find that dynamic thresholding results in significantly better
        photorealism as well as better image-text alignment, especially when using very large guidance weights."

        https://huggingface.co/papers/2205.11487
        """
        dtype = sample.dtype
        batch_size, channels, *remaining_dims = sample.shape

        if dtype not in (torch.float32, torch.float64):
            sample = sample.float()  # upcast for quantile calculation, and clamp not implemented for cpu half

        # Flatten sample for doing quantile calculation along each image
        sample = sample.reshape(batch_size, channels * np.prod(remaining_dims))

        abs_sample = sample.abs()  # "a certain percentile absolute pixel value"

        s = torch.quantile(abs_sample, self.config.dynamic_thresholding_ratio, dim=1)
        s = torch.clamp(
            s, min=1, max=self.config.sample_max_value
        )  # When clamped to min=1, equivalent to standard clipping to [-1, 1]
        s = s.unsqueeze(1)  # (batch_size, 1) because clamp will broadcast along dim=0
        sample = torch.clamp(sample, -s, s) / s  # "we threshold xt0 to the range [-s, s] and then divide by s"

        sample = sample.reshape(batch_size, channels, *remaining_dims)
        sample = sample.to(dtype)

        return sample

    def _sigma_to_t(self, sigma, log_sigmas):
        # get log sigma
        log_sigma = np.log(np.maximum(sigma, 1e-10))

        # get distribution
        dists = log_sigma - log_sigmas[:, np.newaxis]

        # get sigmas range
        low_idx = np.cumsum((dists >= 0), axis=0).argmax(axis=0).clip(max=log_sigmas.shape[0] - 2)
        high_idx = low_idx + 1

        low = log_sigmas[low_idx]
        high = log_sigmas[high_idx]

        # interpolate sigmas
        w = (low - log_sigma) / (low - high)
        w = np.clip(w, 0, 1)

        # transform interpolation to time range
        t = (1 - w) * low_idx + w * high_idx
        t = t.reshape(sigma.shape)
        return t

    def _sigma_to_alpha_sigma_t(self, sigma):
        if self.config.use_flow_sigmas:
            alpha_t = 1 - sigma
            sigma_t = sigma
        else:
            alpha_t = 1 / ((sigma**2 + 1) ** 0.5)
            sigma_t = sigma * alpha_t

        return alpha_t, sigma_t

    def _convert_to_karras(self, in_sigmas: torch.Tensor, num_inference_steps) -> torch.Tensor:
        """Constructs the noise schedule of Karras et al. (2022)."""

        # sigma_min/sigma_max may live on the config of schedulers that reuse this helper
        if hasattr(self.config, "sigma_min"):
            sigma_min = self.config.sigma_min
        else:
            sigma_min = None

        if hasattr(self.config, "sigma_max"):
            sigma_max = self.config.sigma_max
        else:
            sigma_max = None

        sigma_min = sigma_min if sigma_min is not None else in_sigmas[-1].item()
        sigma_max = sigma_max if sigma_max is not None else in_sigmas[0].item()

        rho = 7.0  # 7.0 is the value used in the paper
        ramp = np.linspace(0, 1, num_inference_steps)
        min_inv_rho = sigma_min ** (1 / rho)
        max_inv_rho = sigma_max ** (1 / rho)
        sigmas = (max_inv_rho + ramp * (min_inv_rho - max_inv_rho)) ** rho
        return sigmas

    def _convert_to_exponential(self, in_sigmas: torch.Tensor, num_inference_steps: int) -> torch.Tensor:
        """Constructs an exponential noise schedule."""

        # sigma_min/sigma_max may live on the config of schedulers that reuse this helper
        if hasattr(self.config, "sigma_min"):
            sigma_min = self.config.sigma_min
        else:
            sigma_min = None

        if hasattr(self.config, "sigma_max"):
            sigma_max = self.config.sigma_max
        else:
            sigma_max = None

        sigma_min = sigma_min if sigma_min is not None else in_sigmas[-1].item()
        sigma_max = sigma_max if sigma_max is not None else in_sigmas[0].item()

        sigmas = np.exp(np.linspace(math.log(sigma_max), math.log(sigma_min), num_inference_steps))
        return sigmas

    def _convert_to_beta(
        self, in_sigmas: torch.Tensor, num_inference_steps: int, alpha: float = 0.6, beta: float = 0.6
    ) -> torch.Tensor:
        """From "Beta Sampling is All You Need" [arXiv:2407.12173] (Lee et. al, 2024)"""

        # sigma_min/sigma_max may live on the config of schedulers that reuse this helper
        if hasattr(self.config, "sigma_min"):
            sigma_min = self.config.sigma_min
        else:
            sigma_min = None

        if hasattr(self.config, "sigma_max"):
            sigma_max = self.config.sigma_max
        else:
            sigma_max = None

        sigma_min = sigma_min if sigma_min is not None else in_sigmas[-1].item()
        sigma_max = sigma_max if sigma_max is not None else in_sigmas[0].item()

        sigmas = np.array(
            [
                sigma_min + (ppf * (sigma_max - sigma_min))
                for ppf in [
                    scipy.stats.beta.ppf(timestep, alpha, beta)
                    for timestep in 1 - np.linspace(0, 1, num_inference_steps)
                ]
            ]
        )
        return sigmas

    def convert_model_output(
        self,
        model_output: torch.Tensor,
        *args,
        sample: torch.Tensor = None,
        **kwargs,
    ) -> torch.Tensor:
        """
        Convert the model output to the corresponding type the DEIS algorithm needs.

        Args:
            model_output (`torch.Tensor`):
                The direct output from the learned diffusion model.
            timestep (`int`):
                The current discrete timestep in the diffusion chain.
            sample (`torch.Tensor`):
                A current instance of a sample created by the diffusion process.

        Returns:
            `torch.Tensor`:
                The converted model output.
        """
        timestep = args[0] if len(args) > 0 else kwargs.pop("timestep", None)
        if sample is None:
            if len(args) > 1:
                sample = args[1]
            else:
                raise ValueError("missing `sample` as a required keyword argument")
        if timestep is not None:
            deprecate(
                "timesteps",
                "1.0.0",
                "Passing `timesteps` is deprecated and has no effect as model output conversion is now handled via an internal counter `self.step_index`",
            )

        sigma = self.sigmas[self.step_index]
        alpha_t, sigma_t = self._sigma_to_alpha_sigma_t(sigma)
        if self.config.prediction_type == "epsilon":
            x0_pred = (sample - sigma_t * model_output) / alpha_t
        elif self.config.prediction_type == "sample":
            x0_pred = model_output
        elif self.config.prediction_type == "v_prediction":
            x0_pred = alpha_t * sample - sigma_t * model_output
        elif self.config.prediction_type == "flow_prediction":
            sigma_t = self.sigmas[self.step_index]
            x0_pred = sample - sigma_t * model_output
        else:
            raise ValueError(
                f"prediction_type given as {self.config.prediction_type} must be one of `epsilon`, `sample`, "
                "`v_prediction`, or `flow_prediction` for the DEISMultistepScheduler."
            )

        if self.config.thresholding:
            x0_pred = self._threshold_sample(x0_pred)

        if self.config.algorithm_type == "deis":
            return (sample - alpha_t * x0_pred) / sigma_t
        else:
            raise NotImplementedError("only support log-rho multistep deis now")

    def deis_first_order_update(
        self,
        model_output: torch.Tensor,
        *args,
        sample: torch.Tensor = None,
        **kwargs,
    ) -> torch.Tensor:
        """
        One step for the first-order DEIS (equivalent to DDIM).

        Args:
            model_output (`torch.Tensor`):
                The direct output from the learned diffusion model.
            timestep (`int`):
                The current discrete timestep in the diffusion chain.
            prev_timestep (`int`):
                The previous discrete timestep in the diffusion chain.
            sample (`torch.Tensor`):
                A current instance of a sample created by the diffusion process.

        Returns:
            `torch.Tensor`:
                The sample tensor at the previous timestep.
        """
        timestep = args[0] if len(args) > 0 else kwargs.pop("timestep", None)
        prev_timestep = args[1] if len(args) > 1 else kwargs.pop("prev_timestep", None)
        if sample is None:
            if len(args) > 2:
                sample = args[2]
            else:
                raise ValueError("missing `sample` as a required keyword argument")
        if timestep is not None:
            deprecate(
                "timesteps",
                "1.0.0",
                "Passing `timesteps` is deprecated and has no effect as model output conversion is now handled via an internal counter `self.step_index`",
            )
        if prev_timestep is not None:
            deprecate(
                "prev_timestep",
                "1.0.0",
                "Passing `prev_timestep` is deprecated and has no effect as model output conversion is now handled via an internal counter `self.step_index`",
            )

        sigma_t, sigma_s = self.sigmas[self.step_index + 1], self.sigmas[self.step_index]
        alpha_t, sigma_t = self._sigma_to_alpha_sigma_t(sigma_t)
        alpha_s, sigma_s = self._sigma_to_alpha_sigma_t(sigma_s)
        lambda_t = torch.log(alpha_t) - torch.log(sigma_t)
        lambda_s = torch.log(alpha_s) - torch.log(sigma_s)

        h = lambda_t - lambda_s
        if self.config.algorithm_type == "deis":
            x_t = (alpha_t / alpha_s) * sample - (sigma_t * (torch.exp(h) - 1.0)) * model_output
        else:
            raise NotImplementedError("only support log-rho multistep deis now")
        return x_t

    def multistep_deis_second_order_update(
        self,
        model_output_list: List[torch.Tensor],
        *args,
        sample: torch.Tensor = None,
        **kwargs,
    ) -> torch.Tensor:
        """
        One step for the second-order multistep DEIS.

        Args:
            model_output_list (`List[torch.Tensor]`):
                The direct outputs from the learned diffusion model at current and latter timesteps.
            sample (`torch.Tensor`):
                A current instance of a sample created by the diffusion process.

        Returns:
            `torch.Tensor`:
                The sample tensor at the previous timestep.
        """
        timestep_list = args[0] if len(args) > 0 else kwargs.pop("timestep_list", None)
        prev_timestep = args[1] if len(args) > 1 else kwargs.pop("prev_timestep", None)
        if sample is None:
            if len(args) > 2:
                sample = args[2]
            else:
                raise ValueError("missing `sample` as a required keyword argument")
        if timestep_list is not None:
            deprecate(
                "timestep_list",
                "1.0.0",
                "Passing `timestep_list` is deprecated and has no effect as model output conversion is now handled via an internal counter `self.step_index`",
            )
        if prev_timestep is not None:
            deprecate(
                "prev_timestep",
                "1.0.0",
                "Passing `prev_timestep` is deprecated and has no effect as model output conversion is now handled via an internal counter `self.step_index`",
            )

        sigma_t, sigma_s0, sigma_s1 = (
            self.sigmas[self.step_index + 1],
            self.sigmas[self.step_index],
            self.sigmas[self.step_index - 1],
        )

        alpha_t, sigma_t = self._sigma_to_alpha_sigma_t(sigma_t)
        alpha_s0, sigma_s0 = self._sigma_to_alpha_sigma_t(sigma_s0)
        alpha_s1, sigma_s1 = self._sigma_to_alpha_sigma_t(sigma_s1)

        m0, m1 = model_output_list[-1], model_output_list[-2]

        rho_t, rho_s0, rho_s1 = sigma_t / alpha_t, sigma_s0 / alpha_s0, sigma_s1 / alpha_s1

        if self.config.algorithm_type == "deis":

            def ind_fn(t, b, c):
                # Integrate[(log(t) - log(c)) / (log(b) - log(c)), {t}]
                return t * (-np.log(c) + np.log(t) - 1) / (np.log(b) - np.log(c))

            coef1 = ind_fn(rho_t, rho_s0, rho_s1) - ind_fn(rho_s0, rho_s0, rho_s1)
            coef2 = ind_fn(rho_t, rho_s1, rho_s0) - ind_fn(rho_s0, rho_s1, rho_s0)

            x_t = alpha_t * (sample / alpha_s0 + coef1 * m0 + coef2 * m1)
            return x_t
        else:
            raise NotImplementedError("only support log-rho multistep deis now")

    def multistep_deis_third_order_update(
        self,
        model_output_list: List[torch.Tensor],
        *args,
        sample: torch.Tensor = None,
        **kwargs,
    ) -> torch.Tensor:
        """
        One step for the third-order multistep DEIS.

        Args:
            model_output_list (`List[torch.Tensor]`):
                The direct outputs from the learned diffusion model at current and latter timesteps.
            sample (`torch.Tensor`):
                A current instance of a sample created by the diffusion process.

        Returns:
            `torch.Tensor`:
                The sample tensor at the previous timestep.
        """

        timestep_list = args[0] if len(args) > 0 else kwargs.pop("timestep_list", None)
        prev_timestep = args[1] if len(args) > 1 else kwargs.pop("prev_timestep", None)
        if sample is None:
            if len(args) > 2:
                sample = args[2]
            else:
                raise ValueError("missing `sample` as a required keyword argument")
        if timestep_list is not None:
            deprecate(
                "timestep_list",
                "1.0.0",
                "Passing `timestep_list` is deprecated and has no effect as model output conversion is now handled via an internal counter `self.step_index`",
            )
        if prev_timestep is not None:
            deprecate(
                "prev_timestep",
                "1.0.0",
                "Passing `prev_timestep` is deprecated and has no effect as model output conversion is now handled via an internal counter `self.step_index`",
            )

        sigma_t, sigma_s0, sigma_s1, sigma_s2 = (
            self.sigmas[self.step_index + 1],
            self.sigmas[self.step_index],
            self.sigmas[self.step_index - 1],
            self.sigmas[self.step_index - 2],
        )
        alpha_t, sigma_t = self._sigma_to_alpha_sigma_t(sigma_t)
        alpha_s0, sigma_s0 = self._sigma_to_alpha_sigma_t(sigma_s0)
        alpha_s1, sigma_s1 = self._sigma_to_alpha_sigma_t(sigma_s1)
        alpha_s2, sigma_s2 = self._sigma_to_alpha_sigma_t(sigma_s2)

        m0, m1, m2 = model_output_list[-1], model_output_list[-2], model_output_list[-3]

        rho_t, rho_s0, rho_s1, rho_s2 = (
            sigma_t / alpha_t,
            sigma_s0 / alpha_s0,
            sigma_s1 / alpha_s1,
            sigma_s2 / alpha_s2,
        )

        if self.config.algorithm_type == "deis":

            def ind_fn(t, b, c, d):
                # Integrate[(log(t) - log(c)) * (log(t) - log(d)) / (log(b) - log(c)) / (log(b) - log(d)), {t}]
                numerator = t * (
                    np.log(c) * (np.log(d) - np.log(t) + 1)
                    - np.log(d) * np.log(t)
                    + np.log(d)
                    + np.log(t) ** 2
                    - 2 * np.log(t)
                    + 2
                )
                denominator = (np.log(b) - np.log(c)) * (np.log(b) - np.log(d))
                return numerator / denominator

            coef1 = ind_fn(rho_t, rho_s0, rho_s1, rho_s2) - ind_fn(rho_s0, rho_s0, rho_s1, rho_s2)
            coef2 = ind_fn(rho_t, rho_s1, rho_s2, rho_s0) - ind_fn(rho_s0, rho_s1, rho_s2, rho_s0)
            coef3 = ind_fn(rho_t, rho_s2, rho_s0, rho_s1) - ind_fn(rho_s0, rho_s2, rho_s0, rho_s1)

            x_t = alpha_t * (sample / alpha_s0 + coef1 * m0 + coef2 * m1 + coef3 * m2)

            return x_t
        else:
            raise NotImplementedError("only support log-rho multistep deis now")

    def index_for_timestep(self, timestep, schedule_timesteps=None):
        if schedule_timesteps is None:
            schedule_timesteps = self.timesteps

        index_candidates = (schedule_timesteps == timestep).nonzero()

        if len(index_candidates) == 0:
            step_index = len(self.timesteps) - 1
        # The sigma index that is taken for the **very** first `step` is always the second index (or the last index
        # if there is only 1), so that we don't accidentally skip a sigma when starting in the middle of the
        # denoising schedule (e.g. for image-to-image).
        elif len(index_candidates) > 1:
            step_index = index_candidates[1].item()
        else:
            step_index = index_candidates[0].item()

        return step_index

    def _init_step_index(self, timestep):
        """
        Initialize the step_index counter for the scheduler.
        """

        if self.begin_index is None:
            if isinstance(timestep, torch.Tensor):
                timestep = timestep.to(self.timesteps.device)
            self._step_index = self.index_for_timestep(timestep)
        else:
            self._step_index = self._begin_index

    def step(
        self,
        model_output: torch.Tensor,
        timestep: Union[int, torch.Tensor],
        sample: torch.Tensor,
        return_dict: bool = True,
    ) -> Union[SchedulerOutput, Tuple]:
        """
        Predict the sample from the previous timestep by reversing the SDE. This function propagates the sample with
        the multistep DEIS.

        Args:
            model_output (`torch.Tensor`):
                The direct output from the learned diffusion model.
            timestep (`int`):
                The current discrete timestep in the diffusion chain.
            sample (`torch.Tensor`):
                A current instance of a sample created by the diffusion process.
            return_dict (`bool`):
                Whether or not to return a [`~schedulers.scheduling_utils.SchedulerOutput`] or `tuple`.

        Returns:
            [`~schedulers.scheduling_utils.SchedulerOutput`] or `tuple`:
                If return_dict is `True`, [`~schedulers.scheduling_utils.SchedulerOutput`] is returned, otherwise a
                tuple is returned where the first element is the sample tensor.

        """
        if self.num_inference_steps is None:
            raise ValueError(
                "Number of inference steps is 'None', you need to run 'set_timesteps' after creating the scheduler"
            )

        if self.step_index is None:
            self._init_step_index(timestep)

        lower_order_final = (
            (self.step_index == len(self.timesteps) - 1) and self.config.lower_order_final and len(self.timesteps) < 15
        )
        lower_order_second = (
            (self.step_index == len(self.timesteps) - 2) and self.config.lower_order_final and len(self.timesteps) < 15
        )

        model_output = self.convert_model_output(model_output, sample=sample)
        for i in range(self.config.solver_order - 1):
            self.model_outputs[i] = self.model_outputs[i + 1]
        self.model_outputs[-1] = model_output

        if self.config.solver_order == 1 or self.lower_order_nums < 1 or lower_order_final:
            prev_sample = self.deis_first_order_update(model_output, sample=sample)
        elif self.config.solver_order == 2 or self.lower_order_nums < 2 or lower_order_second:
            prev_sample = self.multistep_deis_second_order_update(self.model_outputs, sample=sample)
        else:
            prev_sample = self.multistep_deis_third_order_update(self.model_outputs, sample=sample)

        if self.lower_order_nums < self.config.solver_order:
            self.lower_order_nums += 1

        # upon completion increase step index by one
        self._step_index += 1

        if not return_dict:
            return (prev_sample,)

        return SchedulerOutput(prev_sample=prev_sample)

    def scale_model_input(self, sample: torch.Tensor, *args, **kwargs) -> torch.Tensor:
        """
        Ensures interchangeability with schedulers that need to scale the denoising model input depending on the
        current timestep.

        Args:
            sample (`torch.Tensor`):
                The input sample.

        Returns:
            `torch.Tensor`:
                A scaled input sample.
        """
        return sample

    def add_noise(
        self,
        original_samples: torch.Tensor,
        noise: torch.Tensor,
        timesteps: torch.IntTensor,
    ) -> torch.Tensor:
        # Make sure sigmas and timesteps have the same device and dtype as original_samples
        sigmas = self.sigmas.to(device=original_samples.device, dtype=original_samples.dtype)
        if original_samples.device.type == "mps" and torch.is_floating_point(timesteps):
            # mps does not support float64
            schedule_timesteps = self.timesteps.to(original_samples.device, dtype=torch.float32)
            timesteps = timesteps.to(original_samples.device, dtype=torch.float32)
        else:
            schedule_timesteps = self.timesteps.to(original_samples.device)
            timesteps = timesteps.to(original_samples.device)

        # begin_index is None when the scheduler is used for training or the pipeline does not call set_begin_index
        if self.begin_index is None:
            step_indices = [self.index_for_timestep(t, schedule_timesteps) for t in timesteps]
        elif self.step_index is not None:
            # add_noise is called after the first denoising step (for inpainting)
            step_indices = [self.step_index] * timesteps.shape[0]
        else:
            # add_noise is called before the first denoising step to create the initial latent (img2img)
            step_indices = [self.begin_index] * timesteps.shape[0]

        sigma = sigmas[step_indices].flatten()
        while len(sigma.shape) < len(original_samples.shape):
            sigma = sigma.unsqueeze(-1)

        alpha_t, sigma_t = self._sigma_to_alpha_sigma_t(sigma)
        noisy_samples = alpha_t * original_samples + sigma_t * noise
        return noisy_samples

    def __len__(self):
        return self.config.num_train_timesteps