
import inspect
from typing import Callable, Dict, List, Optional, Union

import PIL
import PIL.Image
import torch
from transformers import T5EncoderModel, T5Tokenizer

from ...image_processor import VaeImageProcessor
from ...loaders import StableDiffusionLoraLoaderMixin
from ...models import Kandinsky3UNet, VQModel
from ...schedulers import DDPMScheduler
from ...utils import deprecate, is_torch_xla_available, logging, replace_example_docstring
from ...utils.torch_utils import randn_tensor
from ..pipeline_utils import DiffusionPipeline, ImagePipelineOutput


if is_torch_xla_available():
    import torch_xla.core.xla_model as xm

    XLA_AVAILABLE = True
else:
    XLA_AVAILABLE = False

logger = logging.get_logger(__name__)  # pylint: disable=invalid-name

EXAMPLE_DOC_STRING = """
    Examples:
        ```py
        >>> from diffusers import AutoPipelineForImage2Image
        >>> from diffusers.utils import load_image
        >>> import torch

        >>> pipe = AutoPipelineForImage2Image.from_pretrained(
        ...     "kandinsky-community/kandinsky-3", variant="fp16", torch_dtype=torch.float16
        ... )
        >>> pipe.enable_model_cpu_offload()

        >>> prompt = "A painting of the inside of a subway train with tiny raccoons."
        >>> image = load_image(
        ...     "https://huggingface.co/datasets/hf-internal-testing/diffusers-images/resolve/main/kandinsky3/t2i.png"
        ... )

        >>> generator = torch.Generator(device="cpu").manual_seed(0)
        >>> image = pipe(prompt, image=image, strength=0.75, num_inference_steps=25, generator=generator).images[0]
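        >>> # With the default `output_type="pil"`, `image` is a standard `PIL.Image.Image`;
        >>> # saving it is optional and the filename below is only illustrative.
        >>> image.save("kandinsky3_img2img.png")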
        ```
"""


class Kandinsky3Img2ImgPipeline(DiffusionPipeline, StableDiffusionLoraLoaderMixin):
    model_cpu_offload_seq = "text_encoder->movq->unet->movq"

    _callback_tensor_inputs = [
        "latents",
        "prompt_embeds",
        "negative_prompt_embeds",
        "negative_attention_mask",
        "attention_mask",
    ]

    def __init__(
        self,
        tokenizer: T5Tokenizer,
        text_encoder: T5EncoderModel,
        unet: Kandinsky3UNet,
        scheduler: DDPMScheduler,
        movq: VQModel,
    ):
        super().__init__()

        self.register_modules(
            tokenizer=tokenizer, text_encoder=text_encoder, unet=unet, scheduler=scheduler, movq=movq
        )
        movq_scale_factor = 2 ** (len(self.movq.config.block_out_channels) - 1) if getattr(self, "movq", None) else 8
        movq_latent_channels = self.movq.config.latent_channels if getattr(self, "movq", None) else 4
        self.image_processor = VaeImageProcessor(
            vae_scale_factor=movq_scale_factor,
            vae_latent_channels=movq_latent_channels,
            resample="bicubic",
            reducing_gap=1,
        )

    def get_timesteps(self, num_inference_steps, strength, device):
        # get the original timestep using init_timestep
        init_timestep = min(int(num_inference_steps * strength), num_inference_steps)

        t_start = max(num_inference_steps - init_timestep, 0)
        timesteps = self.scheduler.timesteps[t_start:]

        return timesteps, num_inference_steps - t_start

    def _process_embeds(self, embeddings, attention_mask, cut_context):
        # when `cut_context` is enabled, zero out masked positions and trim both tensors
        # to the longest sequence in the batch (plus one position)
        if cut_context:
            embeddings[attention_mask == 0] = torch.zeros_like(embeddings[attention_mask == 0])
            max_seq_length = attention_mask.sum(-1).max() + 1
            embeddings = embeddings[:, :max_seq_length]
            attention_mask = attention_mask[:, :max_seq_length]
        return embeddings, attention_mask

    @torch.no_grad()
    def encode_prompt(
        self,
        prompt,
        do_classifier_free_guidance=True,
        num_images_per_prompt=1,
        device=None,
        negative_prompt=None,
        prompt_embeds: Optional[torch.Tensor] = None,
        negative_prompt_embeds: Optional[torch.Tensor] = None,
        _cut_context=False,
        attention_mask: Optional[torch.Tensor] = None,
        negative_attention_mask: Optional[torch.Tensor] = None,
    ):
        """
        Encodes the prompt into text encoder hidden states.

        Args:
             prompt (`str` or `List[str]`, *optional*):
                prompt to be encoded
            device (`torch.device`, *optional*):
                torch device to place the resulting embeddings on
            num_images_per_prompt (`int`, *optional*, defaults to 1):
                number of images that should be generated per prompt
            do_classifier_free_guidance (`bool`, *optional*, defaults to `True`):
                whether to use classifier free guidance or not
            negative_prompt (`str` or `List[str]`, *optional*):
                The prompt or prompts not to guide the image generation. If not defined, one has to pass
                `negative_prompt_embeds` instead.
                Ignored when not using guidance (i.e., ignored if `guidance_scale` is less than `1`).
            prompt_embeds (`torch.Tensor`, *optional*):
                Pre-generated text embeddings. Can be used to easily tweak text inputs, *e.g.* prompt weighting. If not
                provided, text embeddings will be generated from `prompt` input argument.
            negative_prompt_embeds (`torch.Tensor`, *optional*):
                Pre-generated negative text embeddings. Can be used to easily tweak text inputs, *e.g.* prompt
                weighting. If not provided, negative_prompt_embeds will be generated from `negative_prompt` input
                argument.
            attention_mask (`torch.Tensor`, *optional*):
                Pre-generated attention mask. Must provide if passing `prompt_embeds` directly.
            negative_attention_mask (`torch.Tensor`, *optional*):
                Pre-generated negative attention mask. Must provide if passing `negative_prompt_embeds` directly.
        """
        if prompt is not None and negative_prompt is not None:
            if type(prompt) is not type(negative_prompt):
                raise TypeError(
                    f"`negative_prompt` should be the same type to `prompt`, but got {type(negative_prompt)} !="
                    f" {type(prompt)}."
                )

        if device is None:
            device = self._execution_device

        if prompt is not None and isinstance(prompt, str):
            batch_size = 1
        elif prompt is not None and isinstance(prompt, list):
            batch_size = len(prompt)
        else:
            batch_size = prompt_embeds.shape[0]

        max_length = 128

        if prompt_embeds is None:
            text_inputs = self.tokenizer(
                prompt,
                padding="max_length",
                max_length=max_length,
                truncation=True,
                return_tensors="pt",
            )
            text_input_ids = text_inputs.input_ids.to(device)
            attention_mask = text_inputs.attention_mask.to(device)
            prompt_embeds = self.text_encoder(text_input_ids, attention_mask=attention_mask)
            prompt_embeds = prompt_embeds[0]
            prompt_embeds, attention_mask = self._process_embeds(prompt_embeds, attention_mask, _cut_context)
            prompt_embeds = prompt_embeds * attention_mask.unsqueeze(2)

        if self.text_encoder is not None:
            dtype = self.text_encoder.dtype
        else:
            dtype = None

        prompt_embeds = prompt_embeds.to(dtype=dtype, device=device)

        bs_embed, seq_len, _ = prompt_embeds.shape
        # duplicate text embeddings for each generation per prompt, using mps friendly method
        prompt_embeds = prompt_embeds.repeat(1, num_images_per_prompt, 1)
        prompt_embeds = prompt_embeds.view(bs_embed * num_images_per_prompt, seq_len, -1)
        attention_mask = attention_mask.repeat(num_images_per_prompt, 1)

        # get unconditional embeddings for classifier free guidance
        if do_classifier_free_guidance and negative_prompt_embeds is None:
            if negative_prompt is None:
                uncond_tokens = [""] * batch_size
            elif isinstance(negative_prompt, str):
                uncond_tokens = [negative_prompt]
            elif batch_size != len(negative_prompt):
                raise ValueError(
                    f"`negative_prompt`: {negative_prompt} has batch size {len(negative_prompt)}, but `prompt`:"
                    f" {prompt} has batch size {batch_size}. Please make sure that passed `negative_prompt` matches"
                    " the batch size of `prompt`."
                )
            else:
                uncond_tokens = negative_prompt

            if negative_prompt is not None:
                uncond_input = self.tokenizer(
                    uncond_tokens,
                    padding="max_length",
                    max_length=128,
                    truncation=True,
                    return_attention_mask=True,
                    return_tensors="pt",
                )
                text_input_ids = uncond_input.input_ids.to(device)
                negative_attention_mask = uncond_input.attention_mask.to(device)

                negative_prompt_embeds = self.text_encoder(text_input_ids, attention_mask=negative_attention_mask)
                negative_prompt_embeds = negative_prompt_embeds[0]
                negative_prompt_embeds = negative_prompt_embeds[:, : prompt_embeds.shape[1]]
                negative_attention_mask = negative_attention_mask[:, : prompt_embeds.shape[1]]
                negative_prompt_embeds = negative_prompt_embeds * negative_attention_mask.unsqueeze(2)
            else:
                negative_prompt_embeds = torch.zeros_like(prompt_embeds)
                negative_attention_mask = torch.zeros_like(attention_mask)

        if do_classifier_free_guidance:
            # duplicate unconditional embeddings for each generation per prompt, using mps friendly method
            seq_len = negative_prompt_embeds.shape[1]

            negative_prompt_embeds = negative_prompt_embeds.to(dtype=dtype, device=device)
            if negative_prompt_embeds.shape != prompt_embeds.shape:
                negative_prompt_embeds = negative_prompt_embeds.repeat(1, num_images_per_prompt, 1)
                negative_prompt_embeds = negative_prompt_embeds.view(batch_size * num_images_per_prompt, seq_len, -1)
                negative_attention_mask = negative_attention_mask.repeat(num_images_per_prompt, 1)
        else:
            negative_prompt_embeds = None
            negative_attention_mask = None

        return prompt_embeds, negative_prompt_embeds, attention_mask, negative_attention_mask

    def prepare_latents(self, image, timestep, batch_size, num_images_per_prompt, dtype, device, generator=None):
        if not isinstance(image, (torch.Tensor, PIL.Image.Image, list)):
            raise ValueError(
                f"`image` has to be of type `torch.Tensor`, `PIL.Image.Image` or list but is {type(image)}"
            )

        image = image.to(device=device, dtype=dtype)

        batch_size = batch_size * num_images_per_prompt

        if image.shape[1] == 4:
            # the input is already a latent batch, no need to encode it again
            init_latents = image
        else:
            if isinstance(generator, list) and len(generator) != batch_size:
                raise ValueError(
                    f"You have passed a list of generators of length {len(generator)}, but requested an effective"
                    f" batch size of {batch_size}. Make sure the batch size matches the length of the generators."
                )
            elif isinstance(generator, list):
                init_latents = [
                    self.movq.encode(image[i : i + 1]).latent_dist.sample(generator[i]) for i in range(batch_size)
                ]
                init_latents = torch.cat(init_latents, dim=0)
            else:
                init_latents = self.movq.encode(image).latent_dist.sample(generator)

            init_latents = self.movq.config.scaling_factor * init_latents

        init_latents = torch.cat([init_latents], dim=0)

        shape = init_latents.shape
        noise = randn_tensor(shape, generator=generator, device=device, dtype=dtype)

        # add noise to the initial latents according to the starting timestep
        init_latents = self.scheduler.add_noise(init_latents, noise, timestep)
        latents = init_latents

        return latents

    def prepare_extra_step_kwargs(self, generator, eta):
        # prepare extra kwargs for the scheduler step, since not all schedulers have the same signature
        # eta (η) is only used with the DDIMScheduler; it will be ignored for other schedulers.
        accepts_eta = "eta" in set(inspect.signature(self.scheduler.step).parameters.keys())
        extra_step_kwargs = {}
        if accepts_eta:
            extra_step_kwargs["eta"] = eta

        # check if the scheduler accepts generator
        accepts_generator = "generator" in set(inspect.signature(self.scheduler.step).parameters.keys())
        if accepts_generator:
            extra_step_kwargs["generator"] = generator
        return extra_step_kwargs

    def check_inputs(
        self,
        prompt,
        callback_steps,
        negative_prompt=None,
        prompt_embeds=None,
        negative_prompt_embeds=None,
        callback_on_step_end_tensor_inputs=None,
        attention_mask=None,
        negative_attention_mask=None,
    ):
        if callback_steps is not None and (not isinstance(callback_steps, int) or callback_steps <= 0):
            raise ValueError(
                f"`callback_steps` has to be a positive integer but is {callback_steps} of type"
                f" {type(callback_steps)}."
            )

        if callback_on_step_end_tensor_inputs is not None and not all(
            k in self._callback_tensor_inputs for k in callback_on_step_end_tensor_inputs
        ):
            raise ValueError(
                f"`callback_on_step_end_tensor_inputs` has to be in {self._callback_tensor_inputs}, but found"
                f" {[k for k in callback_on_step_end_tensor_inputs if k not in self._callback_tensor_inputs]}"
            )

        if prompt is not None and prompt_embeds is not None:
            raise ValueError(
                f"Cannot forward both `prompt`: {prompt} and `prompt_embeds`: {prompt_embeds}. Please make sure to"
                " only forward one of the two."
            )
        elif prompt is None and prompt_embeds is None:
            raise ValueError(
                "Provide either `prompt` or `prompt_embeds`. Cannot leave both `prompt` and `prompt_embeds` undefined."
            )
        elif prompt is not None and (not isinstance(prompt, str) and not isinstance(prompt, list)):
            raise ValueError(f"`prompt` has to be of type `str` or `list` but is {type(prompt)}")

        if negative_prompt is not None and negative_prompt_embeds is not None:
            raise ValueError(
                f"Cannot forward both `negative_prompt`: {negative_prompt} and `negative_prompt_embeds`:"
                f" {negative_prompt_embeds}. Please make sure to only forward one of the two."
            )

        if prompt_embeds is not None and negative_prompt_embeds is not None:
            if prompt_embeds.shape != negative_prompt_embeds.shape:
                raise ValueError(
                    "`prompt_embeds` and `negative_prompt_embeds` must have the same shape when passed directly, but"
                    f" got: `prompt_embeds` {prompt_embeds.shape} != `negative_prompt_embeds`"
                    f" {negative_prompt_embeds.shape}."
                )

        if negative_prompt_embeds is not None and negative_attention_mask is None:
            raise ValueError("Please provide `negative_attention_mask` along with `negative_prompt_embeds`")

        if negative_prompt_embeds is not None and negative_attention_mask is not None:
            if negative_prompt_embeds.shape[:2] != negative_attention_mask.shape:
                raise ValueError(
                    "`negative_prompt_embeds` and `negative_attention_mask` must have the same batch_size and token"
                    " length when passed directly, but got: `negative_prompt_embeds`"
                    f" {negative_prompt_embeds.shape[:2]} != `negative_attention_mask` {negative_attention_mask.shape}."
                )

        if prompt_embeds is not None and attention_mask is None:
            raise ValueError("Please provide `attention_mask` along with `prompt_embeds`")

        if prompt_embeds is not None and attention_mask is not None:
            if prompt_embeds.shape[:2] != attention_mask.shape:
                raise ValueError(
                    "`prompt_embeds` and `attention_mask` must have the same batch_size and token length when passed"
                    f" directly, but got: `prompt_embeds` {prompt_embeds.shape[:2]} != `attention_mask`"
                    f" {attention_mask.shape}."
                )

    @property
    def guidance_scale(self):
        return self._guidance_scale

    @property
    def do_classifier_free_guidance(self):
        return self._guidance_scale > 1

    @property
    def num_timesteps(self):
        return self._num_timesteps

    @torch.no_grad()
    @replace_example_docstring(EXAMPLE_DOC_STRING)
    def __call__(
        self,
        prompt: Union[str, List[str]] = None,
        image: Union[torch.Tensor, PIL.Image.Image, List[torch.Tensor], List[PIL.Image.Image]] = None,
        strength: float = 0.3,
        num_inference_steps: int = 25,
        guidance_scale: float = 3.0,
        negative_prompt: Optional[Union[str, List[str]]] = None,
        num_images_per_prompt: Optional[int] = 1,
        generator: Optional[Union[torch.Generator, List[torch.Generator]]] = None,
        prompt_embeds: Optional[torch.Tensor] = None,
        negative_prompt_embeds: Optional[torch.Tensor] = None,
        attention_mask: Optional[torch.Tensor] = None,
        negative_attention_mask: Optional[torch.Tensor] = None,
        output_type: Optional[str] = "pil",
        return_dict: bool = True,
        callback_on_step_end: Optional[Callable[[int, int, Dict], None]] = None,
        callback_on_step_end_tensor_inputs: List[str] = ["latents"],
        **kwargs,
    ):
        """
        Function invoked when calling the pipeline for generation.

        Args:
            prompt (`str` or `List[str]`, *optional*):
                The prompt or prompts to guide the image generation. If not defined, one has to pass
                `prompt_embeds` instead.
            image (`torch.Tensor`, `PIL.Image.Image`, `np.ndarray`, `List[torch.Tensor]`, `List[PIL.Image.Image]`, or `List[np.ndarray]`):
                `Image`, or tensor representing an image batch, that will be used as the starting point for the
                process.
            strength (`float`, *optional*, defaults to 0.3):
                Indicates extent to transform the reference `image`. Must be between 0 and 1. `image` is used as a
                starting point and more noise is added the higher the `strength`. The number of denoising steps depends
                on the amount of noise initially added. When `strength` is 1, added noise is maximum and the denoising
                process runs for the full number of iterations specified in `num_inference_steps`. A value of 1
                essentially ignores `image`.
            num_inference_steps (`int`, *optional*, defaults to 25):
                The number of denoising steps. More denoising steps usually lead to a higher quality image at the
                expense of slower inference.
            guidance_scale (`float`, *optional*, defaults to 3.0):
                Guidance scale as defined in [Classifier-Free Diffusion
                Guidance](https://huggingface.co/papers/2207.12598). `guidance_scale` is defined as `w` of equation 2.
                of [Imagen Paper](https://huggingface.co/papers/2205.11487). Guidance scale is enabled by setting
                `guidance_scale > 1`. Higher guidance scale encourages to generate images that are closely linked to
                the text `prompt`, usually at the expense of lower image quality.
            negative_prompt (`str` or `List[str]`, *optional*):
                The prompt or prompts not to guide the image generation. If not defined, one has to pass
                `negative_prompt_embeds` instead. Ignored when not using guidance (i.e., ignored if `guidance_scale` is
                less than `1`).
            num_images_per_prompt (`int`, *optional*, defaults to 1):
                The number of images to generate per prompt.
            generator (`torch.Generator` or `List[torch.Generator]`, *optional*):
                One or a list of [torch generator(s)](https://pytorch.org/docs/stable/generated/torch.Generator.html)
                to make generation deterministic.
            prompt_embeds (`torch.Tensor`, *optional*):
                Pre-generated text embeddings. Can be used to easily tweak text inputs, *e.g.* prompt weighting. If not
                provided, text embeddings will be generated from `prompt` input argument.
            negative_prompt_embeds (`torch.Tensor`, *optional*):
                Pre-generated negative text embeddings. Can be used to easily tweak text inputs, *e.g.* prompt
                weighting. If not provided, negative_prompt_embeds will be generated from `negative_prompt` input
                argument.
            attention_mask (`torch.Tensor`, *optional*):
                Pre-generated attention mask. Must provide if passing `prompt_embeds` directly.
            negative_attention_mask (`torch.Tensor`, *optional*):
                Pre-generated negative attention mask. Must provide if passing `negative_prompt_embeds` directly.
            output_type (`str`, *optional*, defaults to `"pil"`):
                The output format of the generate image. Choose between
                [PIL](https://pillow.readthedocs.io/en/stable/): `PIL.Image.Image` or `np.array`.
            return_dict (`bool`, *optional*, defaults to `True`):
                Whether or not to return a [`~pipelines.stable_diffusion.IFPipelineOutput`] instead of a plain tuple.
            callback_on_step_end (`Callable`, *optional*):
                A function that calls at the end of each denoising steps during the inference. The function is called
                with the following arguments: `callback_on_step_end(self: DiffusionPipeline, step: int, timestep: int,
                callback_kwargs: Dict)`. `callback_kwargs` will include a list of all tensors as specified by
                `callback_on_step_end_tensor_inputs`.
            callback_on_step_end_tensor_inputs (`List`, *optional*):
                The list of tensor inputs for the `callback_on_step_end` function. The tensors specified in the list
                will be passed as `callback_kwargs` argument. You will only be able to include variables listed in the
                `._callback_tensor_inputs` attribute of your pipeline class.

        Examples:

        Returns:
            [`~pipelines.ImagePipelineOutput`] or `tuple`

        """
        callback = kwargs.pop("callback", None)
        callback_steps = kwargs.pop("callback_steps", None)

        if callback is not None:
            deprecate(
                "callback",
                "1.0.0",
                "Passing `callback` as an input argument to `__call__` is deprecated, consider use `callback_on_step_end`",
            )
        if callback_steps is not None:
            deprecate(
                "callback_steps",
                "1.0.0",
                "Passing `callback_steps` as an input argument to `__call__` is deprecated, consider use `callback_on_step_end`",
            )

        if callback_on_step_end_tensor_inputs is not None and not all(
            k in self._callback_tensor_inputs for k in callback_on_step_end_tensor_inputs
        ):
            raise ValueError(
                f"`callback_on_step_end_tensor_inputs` has to be in {self._callback_tensor_inputs}, but found"
                f" {[k for k in callback_on_step_end_tensor_inputs if k not in self._callback_tensor_inputs]}"
            )

        cut_context = True

        # 1. Check inputs. Raise error if not correct
        self.check_inputs(
            prompt,
            callback_steps,
            negative_prompt,
            prompt_embeds,
            negative_prompt_embeds,
            callback_on_step_end_tensor_inputs,
            attention_mask,
            negative_attention_mask,
        )

        self._guidance_scale = guidance_scale

        if prompt is not None and isinstance(prompt, str):
            batch_size = 1
        elif prompt is not None and isinstance(prompt, list):
            batch_size = len(prompt)
        else:
            batch_size = prompt_embeds.shape[0]

        device = self._execution_device

        # 2. Encode input prompt
        prompt_embeds, negative_prompt_embeds, attention_mask, negative_attention_mask = self.encode_prompt(
            prompt,
            self.do_classifier_free_guidance,
            num_images_per_prompt=num_images_per_prompt,
            device=device,
            negative_prompt=negative_prompt,
            prompt_embeds=prompt_embeds,
            negative_prompt_embeds=negative_prompt_embeds,
            _cut_context=cut_context,
            attention_mask=attention_mask,
            negative_attention_mask=negative_attention_mask,
        )

        if self.do_classifier_free_guidance:
            prompt_embeds = torch.cat([negative_prompt_embeds, prompt_embeds])
            attention_mask = torch.cat([negative_attention_mask, attention_mask]).bool()

        if not isinstance(image, list):
            image = [image]
        if not all(isinstance(i, (PIL.Image.Image, torch.Tensor)) for i in image):
            raise ValueError(
                f"Input is in incorrect format: {[type(i) for i in image]}. Currently, we only support PIL image and"
                " pytorch tensor"
            )

        image = torch.cat([self.image_processor.preprocess(i) for i in image], dim=0)
        image = image.to(dtype=prompt_embeds.dtype, device=device)

        # 3. Prepare timesteps
        self.scheduler.set_timesteps(num_inference_steps, device=device)
        timesteps, num_inference_steps = self.get_timesteps(num_inference_steps, strength, device)

        # 4. Prepare latents from the input image and add noise for the starting timestep
        latents = self.movq.encode(image)["latents"]
        latents = latents.repeat_interleave(num_images_per_prompt, dim=0)
        latent_timestep = timesteps[:1].repeat(batch_size * num_images_per_prompt)
        latents = self.prepare_latents(
            latents, latent_timestep, batch_size, num_images_per_prompt, prompt_embeds.dtype, device, generator
        )

        if hasattr(self, "text_encoder_offload_hook") and self.text_encoder_offload_hook is not None:
            self.text_encoder_offload_hook.offload()

        # 5. Denoising loop
        num_warmup_steps = len(timesteps) - num_inference_steps * self.scheduler.order
        self._num_timesteps = len(timesteps)
        with self.progress_bar(total=num_inference_steps) as progress_bar:
            for i, t in enumerate(timesteps):
                latent_model_input = torch.cat([latents] * 2) if self.do_classifier_free_guidance else latents

                noise_pred = self.unet(
                    latent_model_input,
                    t,
                    encoder_hidden_states=prompt_embeds,
                    encoder_attention_mask=attention_mask,
                )[0]

                if self.do_classifier_free_guidance:
                    noise_pred_uncond, noise_pred_text = noise_pred.chunk(2)

                    noise_pred = (guidance_scale + 1.0) * noise_pred_text - guidance_scale * noise_pred_uncond

                # compute the previous noisy sample x_t -> x_t-1
                latents = self.scheduler.step(
                    noise_pred,
                    t,
                    latents,
                    generator=generator,
                ).prev_sample

                if callback_on_step_end is not None:
                    callback_kwargs = {}
                    for k in callback_on_step_end_tensor_inputs:
                        callback_kwargs[k] = locals()[k]
                    callback_outputs = callback_on_step_end(self, i, t, callback_kwargs)

                    latents = callback_outputs.pop("latents", latents)
                    prompt_embeds = callback_outputs.pop("prompt_embeds", prompt_embeds)
                    negative_prompt_embeds = callback_outputs.pop("negative_prompt_embeds", negative_prompt_embeds)
                    attention_mask = callback_outputs.pop("attention_mask", attention_mask)
                    negative_attention_mask = callback_outputs.pop(
                        "negative_attention_mask", negative_attention_mask
                    )

                if i == len(timesteps) - 1 or ((i + 1) > num_warmup_steps and (i + 1) % self.scheduler.order == 0):
                    progress_bar.update()
                    if callback is not None and i % callback_steps == 0:
                        step_idx = i // getattr(self.scheduler, "order", 1)
                        callback(step_idx, t, latents)

                if XLA_AVAILABLE:
                    xm.mark_step()

            # post-processing
            if not output_type == "latent":
                image = self.movq.decode(latents, force_not_quantize=True)["sample"]
                image = self.image_processor.postprocess(image, output_type=output_type)
            else:
                image = latents

            self.maybe_free_model_hooks()

            if not return_dict:
                return (image,)

            return ImagePipelineOutput(images=image)