from dataclasses import dataclass
from typing import List, Optional, Union

import numpy as np
import PIL.Image

from ...utils import BaseOutput


@dataclass
class StableDiffusionSafePipelineOutput(BaseOutput):
    """
    Output class for Safe Stable Diffusion pipelines.

    Args:
        images (`List[PIL.Image.Image]` or `np.ndarray`)
            List of denoised PIL images of length `batch_size` or a numpy array of shape `(batch_size, height,
            width, num_channels)`. The PIL images or numpy array represent the denoised images of the diffusion
            pipeline.
        nsfw_content_detected (`List[bool]`)
            List of flags denoting whether the corresponding generated image likely represents "not-safe-for-work"
            (nsfw) content, or `None` if safety checking could not be performed.
        unsafe_images (`List[PIL.Image.Image]` or `np.ndarray`)
            List of denoised PIL images that were flagged by the safety checker and may contain "not-safe-for-work"
            (nsfw) content, or `None` if no safety check was performed or no images were flagged.
        applied_safety_concept (`str`)
            The safety concept that was applied for safety guidance, or `None` if safety guidance was disabled.
imagesnsfw_content_detectedunsafe_imagesapplied_safety_concept N)__name__
__module____qualname____firstlineno____doc__r   r   PILImagenpndarray__annotations__r   boolstr__static_attributes__r       s/home/james-whalen/.local/lib/python3.13/site-packages/diffusers/pipelines/stable_diffusion_safe/pipeline_output.pyr
   r
      si    " $syy'344#DJ//E$syy"7"CDEE$SM)r   r
   )dataclassesr   typingr   r   r   numpyr   	PIL.Imager   utilsr   r
   r   r   r   <module>r%      s4    ! ( (  
 *
 * *r   
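

# Illustrative usage (editor's sketch, not part of the library source). It shows
# how the fields of `StableDiffusionSafePipelineOutput` are typically consumed.
# `StableDiffusionPipelineSafe` is the diffusers pipeline that returns this
# output class; the checkpoint id and prompt below are assumptions for
# illustration only.
#
#     from diffusers import StableDiffusionPipelineSafe
#
#     pipe = StableDiffusionPipelineSafe.from_pretrained(
#         "AIML-TUDA/stable-diffusion-safe"  # assumed example checkpoint
#     )
#     output = pipe(prompt="a photograph of an astronaut riding a horse")
#
#     images = output.images  # denoised, safety-filtered images
#     if output.nsfw_content_detected is not None:
#         flagged = [i for i, nsfw in enumerate(output.nsfw_content_detected) if nsfw]
#     unsafe = output.unsafe_images            # None if nothing was flagged
#     concept = output.applied_safety_concept  # None if safety guidance was off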