
from __future__ import annotations

from typing import List, Union, Mapping, Optional, cast
from typing_extensions import Literal, overload

import httpx

from .. import _legacy_response
from ..types import image_edit_params, image_generate_params, image_create_variation_params
from .._types import NOT_GIVEN, Body, Query, Headers, NotGiven, FileTypes
from .._utils import extract_files, required_args, maybe_transform, deepcopy_minimal, async_maybe_transform
from .._compat import cached_property
from .._resource import SyncAPIResource, AsyncAPIResource
from .._response import to_streamed_response_wrapper, async_to_streamed_response_wrapper
from .._streaming import Stream, AsyncStream
from .._base_client import make_request_options
from ..types.image_model import ImageModel
from ..types.images_response import ImagesResponse
from ..types.image_gen_stream_event import ImageGenStreamEvent
from ..types.image_edit_stream_event import ImageEditStreamEvent

__all__ = ["Images", "AsyncImages"]


class Images(SyncAPIResource):
    @cached_property
    def with_raw_response(self) -> ImagesWithRawResponse:
        """
This property can be used as a prefix for any HTTP method call to return
the raw response object instead of the parsed content.

For more information, see https://www.github.com/openai/openai-python#accessing-raw-response-data-eg-headers
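
Example (illustrative; assumes this resource is exposed on an already-configured
client object as ``client.images``, which is not defined in this module):

    raw = client.images.with_raw_response.generate(prompt="a red fox")
    # `raw` is the unparsed HTTP response described above, not an ImagesResponse.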
        """
        return ImagesWithRawResponse(self)

    @cached_property
    def with_streaming_response(self) -> ImagesWithStreamingResponse:
        """
An alternative to `.with_raw_response` that doesn't eagerly read the response body.

For more information, see https://www.github.com/openai/openai-python#with_streaming_response
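
Example (illustrative; assumes the same ``client.images`` handle as above):

    streamed = client.images.with_streaming_response.generate(prompt="a red fox")
    # The response body is not read eagerly; consume it via the returned wrapper.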
        """
        return ImagesWithStreamingResponse(self)

    def create_variation(
        self,
        *,
        image: FileTypes,
        model: Union[str, ImageModel, None] | NotGiven = NOT_GIVEN,
        n: Optional[int] | NotGiven = NOT_GIVEN,
        response_format: Optional[Literal["url", "b64_json"]] | NotGiven = NOT_GIVEN,
        size: Optional[Literal["256x256", "512x512", "1024x1024"]] | NotGiven = NOT_GIVEN,
        user: str | NotGiven = NOT_GIVEN,
        extra_headers: Headers | None = None,
        extra_query: Query | None = None,
        extra_body: Body | None = None,
        timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
    ) -> ImagesResponse:
        """Creates a variation of a given image.

This endpoint only supports `dall-e-2`.
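
Example (a minimal sketch; ``client`` stands for an already-configured client
object and is not part of this module):

    variation = client.images.create_variation(
        image=open("logo.png", "rb"),
        n=2,
        size="512x512",
    )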

Args:
  image: The image to use as the basis for the variation(s). Must be a valid PNG file,
      less than 4MB, and square.

  model: The model to use for image generation. Only `dall-e-2` is supported at this
      time.

  n: The number of images to generate. Must be between 1 and 10.

  response_format: The format in which the generated images are returned. Must be one of `url` or
      `b64_json`. URLs are only valid for 60 minutes after the image has been
      generated.

  size: The size of the generated images. Must be one of `256x256`, `512x512`, or
      `1024x1024`.

  user: A unique identifier representing your end-user, which can help OpenAI to monitor
      and detect abuse.
      [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#end-user-ids).

  extra_headers: Send extra headers

  extra_query: Add additional query parameters to the request

  extra_body: Add additional JSON properties to the request

  timeout: Override the client-level default timeout for this request, in seconds
        """
        body = deepcopy_minimal(
            {
                "image": image,
                "model": model,
                "n": n,
                "response_format": response_format,
                "size": size,
                "user": user,
            }
        )
        # Split file-like fields out of the JSON body so they are sent as multipart form parts.
        files = extract_files(cast(Mapping[str, object], body), paths=[["image"]])
        extra_headers = {"Content-Type": "multipart/form-data", **(extra_headers or {})}
        return self._post(
            "/images/variations",
            body=maybe_transform(body, image_create_variation_params.ImageCreateVariationParams),
            files=files,
            options=make_request_options(
                extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
            ),
            cast_to=ImagesResponse,
        )

    def edit(
        self,
        *,
        image: Union[FileTypes, List[FileTypes]],
        prompt: str,
        background: Optional[Literal["transparent", "opaque", "auto"]] | NotGiven = NOT_GIVEN,
        input_fidelity: Optional[Literal["high", "low"]] | NotGiven = NOT_GIVEN,
        mask: FileTypes | NotGiven = NOT_GIVEN,
        model: Union[str, ImageModel, None] | NotGiven = NOT_GIVEN,
        n: Optional[int] | NotGiven = NOT_GIVEN,
        output_compression: Optional[int] | NotGiven = NOT_GIVEN,
        output_format: Optional[Literal["png", "jpeg", "webp"]] | NotGiven = NOT_GIVEN,
        partial_images: Optional[int] | NotGiven = NOT_GIVEN,
        quality: Optional[Literal["standard", "low", "medium", "high", "auto"]] | NotGiven = NOT_GIVEN,
        response_format: Optional[Literal["url", "b64_json"]] | NotGiven = NOT_GIVEN,
        size: Optional[Literal["256x256", "512x512", "1024x1024", "1536x1024", "1024x1536", "auto"]] | NotGiven = NOT_GIVEN,
        stream: Optional[Literal[False]] | Literal[True] | NotGiven = NOT_GIVEN,
        user: str | NotGiven = NOT_GIVEN,
        extra_headers: Headers | None = None,
        extra_query: Query | None = None,
        extra_body: Body | None = None,
        timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
    ) -> ImagesResponse | Stream[ImageEditStreamEvent]:
        """Creates an edited or extended image given one or more source images and a
prompt.

This endpoint only supports `gpt-image-1` and `dall-e-2`.
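
Example (a minimal sketch; ``client`` is assumed to be an already-configured
client object, not something defined here):

    edited = client.images.edit(
        image=open("photo.png", "rb"),
        prompt="Add a small red boat on the lake",
        model="gpt-image-1",
        size="1024x1024",
    )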

Args:
  image: The image(s) to edit. Must be a supported image file or an array of images.

      For `gpt-image-1`, each image should be a `png`, `webp`, or `jpg` file less than
      50MB. You can provide up to 16 images.

      For `dall-e-2`, you can only provide one image, and it should be a square `png`
      file less than 4MB.

  prompt: A text description of the desired image(s). The maximum length is 1000
      characters for `dall-e-2`, and 32000 characters for `gpt-image-1`.

  background: Allows to set transparency for the background of the generated image(s). This
      parameter is only supported for `gpt-image-1`. Must be one of `transparent`,
      `opaque` or `auto` (default value). When `auto` is used, the model will
      automatically determine the best background for the image.

      If `transparent`, the output format needs to support transparency, so it should
      be set to either `png` (default value) or `webp`.

  input_fidelity: Control how much effort the model will exert to match the style and features,
      especially facial features, of input images. This parameter is only supported
      for `gpt-image-1`. Supports `high` and `low`. Defaults to `low`.

  mask: An additional image whose fully transparent areas (e.g. where alpha is zero)
      indicate where `image` should be edited. If there are multiple images provided,
      the mask will be applied on the first image. Must be a valid PNG file, less than
      4MB, and have the same dimensions as `image`.

  model: The model to use for image generation. Only `dall-e-2` and `gpt-image-1` are
      supported. Defaults to `dall-e-2` unless a parameter specific to `gpt-image-1`
      is used.

  n: The number of images to generate. Must be between 1 and 10.

  output_compression: The compression level (0-100%) for the generated images. This parameter is only
      supported for `gpt-image-1` with the `webp` or `jpeg` output formats, and
      defaults to 100.

  output_format: The format in which the generated images are returned. This parameter is only
      supported for `gpt-image-1`. Must be one of `png`, `jpeg`, or `webp`. The
      default value is `png`.

  partial_images: The number of partial images to generate. This parameter is used for streaming
      responses that return partial images. Value must be between 0 and 3. When set to
      0, the response will be a single image sent in one streaming event.

  quality: The quality of the image that will be generated. `high`, `medium` and `low` are
      only supported for `gpt-image-1`. `dall-e-2` only supports `standard` quality.
      Defaults to `auto`.

  response_format: The format in which the generated images are returned. Must be one of `url` or
      `b64_json`. URLs are only valid for 60 minutes after the image has been
      generated. This parameter is only supported for `dall-e-2`, as `gpt-image-1`
      will always return base64-encoded images.

  size: The size of the generated images. Must be one of `1024x1024`, `1536x1024`
      (landscape), `1024x1536` (portrait), or `auto` (default value) for
      `gpt-image-1`, and one of `256x256`, `512x512`, or `1024x1024` for `dall-e-2`.

  stream: Edit the image in streaming mode. Defaults to `false`. See the
      [Image generation guide](https://platform.openai.com/docs/guides/image-generation)
      for more information.

  user: A unique identifier representing your end-user, which can help OpenAI to monitor
      and detect abuse.
      [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#end-user-ids).

  extra_headers: Send extra headers

  extra_query: Add additional query parameters to the request

  extra_body: Add additional JSON properties to the request

  timeout: Override the client-level default timeout for this request, in seconds
        """
        body = deepcopy_minimal(
            {
                "image": image,
                "prompt": prompt,
                "background": background,
                "input_fidelity": input_fidelity,
                "mask": mask,
                "model": model,
                "n": n,
                "output_compression": output_compression,
                "output_format": output_format,
                "partial_images": partial_images,
                "quality": quality,
                "response_format": response_format,
                "size": size,
                "stream": stream,
                "user": user,
            }
        )
        # `image` may be a single file or a list of files; `mask` is a single optional file.
        files = extract_files(cast(Mapping[str, object], body), paths=[["image"], ["image", "<array>"], ["mask"]])
        extra_headers = {"Content-Type": "multipart/form-data", **(extra_headers or {})}
        return self._post(
            "/images/edits",
            body=maybe_transform(
                body,
                image_edit_params.ImageEditParamsStreaming if stream else image_edit_params.ImageEditParamsNonStreaming,
            ),
            files=files,
            options=make_request_options(
                extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
            ),
            cast_to=ImagesResponse,
            stream=stream or False,
            stream_cls=Stream[ImageEditStreamEvent],
        )
    def generate(
        self,
        *,
        prompt: str,
        background: Optional[Literal["transparent", "opaque", "auto"]] | NotGiven = NOT_GIVEN,
        model: Union[str, ImageModel, None] | NotGiven = NOT_GIVEN,
        moderation: Optional[Literal["low", "auto"]] | NotGiven = NOT_GIVEN,
        n: Optional[int] | NotGiven = NOT_GIVEN,
        output_compression: Optional[int] | NotGiven = NOT_GIVEN,
        output_format: Optional[Literal["png", "jpeg", "webp"]] | NotGiven = NOT_GIVEN,
        partial_images: Optional[int] | NotGiven = NOT_GIVEN,
        quality: Optional[Literal["standard", "hd", "low", "medium", "high", "auto"]] | NotGiven = NOT_GIVEN,
        response_format: Optional[Literal["url", "b64_json"]] | NotGiven = NOT_GIVEN,
        size: Optional[Literal["auto", "1024x1024", "1536x1024", "1024x1536", "256x256", "512x512", "1792x1024", "1024x1792"]] | NotGiven = NOT_GIVEN,
        stream: Optional[Literal[False]] | Literal[True] | NotGiven = NOT_GIVEN,
        style: Optional[Literal["vivid", "natural"]] | NotGiven = NOT_GIVEN,
        user: str | NotGiven = NOT_GIVEN,
        extra_headers: Headers | None = None,
        extra_query: Query | None = None,
        extra_body: Body | None = None,
        timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
    ) -> ImagesResponse | Stream[ImageGenStreamEvent]:
        """
Creates an image given a prompt.
[Learn more](https://platform.openai.com/docs/guides/images).
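
Example (a minimal sketch; ``client`` is assumed to be an already-configured
client object, not something defined here):

    # Non-streaming call: returns an ImagesResponse.
    result = client.images.generate(
        prompt="A watercolor painting of a lighthouse at dawn",
        model="gpt-image-1",
        size="1024x1024",
    )

    # Streaming call: returns an iterable of ImageGenStreamEvent objects.
    for event in client.images.generate(
        prompt="A watercolor painting of a lighthouse at dawn",
        model="gpt-image-1",
        partial_images=2,
        stream=True,
    ):
        ...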

Args:
  prompt: A text description of the desired image(s). The maximum length is 32000
      characters for `gpt-image-1`, 1000 characters for `dall-e-2` and 4000 characters
      for `dall-e-3`.

  background: Allows to set transparency for the background of the generated image(s). This
      parameter is only supported for `gpt-image-1`. Must be one of `transparent`,
      `opaque` or `auto` (default value). When `auto` is used, the model will
      automatically determine the best background for the image.

      If `transparent`, the output format needs to support transparency, so it should
      be set to either `png` (default value) or `webp`.

  model: The model to use for image generation. One of `dall-e-2`, `dall-e-3`, or
      `gpt-image-1`. Defaults to `dall-e-2` unless a parameter specific to
      `gpt-image-1` is used.

  moderation: Control the content-moderation level for images generated by `gpt-image-1`. Must
      be either `low` for less restrictive filtering or `auto` (default value).

  n: The number of images to generate. Must be between 1 and 10. For `dall-e-3`, only
      `n=1` is supported.

  output_compression: The compression level (0-100%) for the generated images. This parameter is only
      supported for `gpt-image-1` with the `webp` or `jpeg` output formats, and
      defaults to 100.

  output_format: The format in which the generated images are returned. This parameter is only
      supported for `gpt-image-1`. Must be one of `png`, `jpeg`, or `webp`.

  partial_images: The number of partial images to generate. This parameter is used for streaming
      responses that return partial images. Value must be between 0 and 3. When set to
      0, the response will be a single image sent in one streaming event.

  quality: The quality of the image that will be generated.

      - `auto` (default value) will automatically select the best quality for the
        given model.
      - `high`, `medium` and `low` are supported for `gpt-image-1`.
      - `hd` and `standard` are supported for `dall-e-3`.
      - `standard` is the only option for `dall-e-2`.

  response_format: The format in which generated images with `dall-e-2` and `dall-e-3` are
      returned. Must be one of `url` or `b64_json`. URLs are only valid for 60 minutes
      after the image has been generated. This parameter isn't supported for
      `gpt-image-1` which will always return base64-encoded images.

  size: The size of the generated images. Must be one of `1024x1024`, `1536x1024`
      (landscape), `1024x1536` (portrait), or `auto` (default value) for
      `gpt-image-1`, one of `256x256`, `512x512`, or `1024x1024` for `dall-e-2`, and
      one of `1024x1024`, `1792x1024`, or `1024x1792` for `dall-e-3`.

  stream: Generate the image in streaming mode. Defaults to `false`. See the
      [Image generation guide](https://platform.openai.com/docs/guides/image-generation)
      for more information. This parameter is only supported for `gpt-image-1`.

  style: The style of the generated images. This parameter is only supported for
      `dall-e-3`. Must be one of `vivid` or `natural`. Vivid causes the model to lean
      towards generating hyper-real and dramatic images. Natural causes the model to
      produce more natural, less hyper-real looking images.

  user: A unique identifier representing your end-user, which can help OpenAI to monitor
      and detect abuse.
      [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#end-user-ids).

  extra_headers: Send extra headers

  extra_query: Add additional query parameters to the request

  extra_body: Add additional JSON properties to the request

  timeout: Override the client-level default timeout for this request, in seconds
        """
        return self._post(
            "/images/generations",
            body=maybe_transform(
                {
                    "prompt": prompt,
                    "background": background,
                    "model": model,
                    "moderation": moderation,
                    "n": n,
                    "output_compression": output_compression,
                    "output_format": output_format,
                    "partial_images": partial_images,
                    "quality": quality,
                    "response_format": response_format,
                    "size": size,
                    "stream": stream,
                    "style": style,
                    "user": user,
                },
                image_generate_params.ImageGenerateParamsStreaming
                if stream
                else image_generate_params.ImageGenerateParamsNonStreaming,
            ),
            options=make_request_options(
                extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
            ),
            cast_to=ImagesResponse,
            stream=stream or False,
            stream_cls=Stream[ImageGenStreamEvent],
        )


class AsyncImages(AsyncAPIResource):
    @cached_property
    def with_raw_response(self) -> AsyncImagesWithRawResponse:
        """
        This property can be used as a prefix for any HTTP method call to return
        the raw response object instead of the parsed content.

        For more information, see https://www.github.com/openai/openai-python#accessing-raw-response-data-eg-headers
        """
        return AsyncImagesWithRawResponse(self)

    @cached_property
    def with_streaming_response(self) -> AsyncImagesWithStreamingResponse:
        """
        An alternative to `.with_raw_response` that doesn't eagerly read the response body.

        For more information, see https://www.github.com/openai/openai-python#with_streaming_response
        """
        return AsyncImagesWithStreamingResponse(self)

    async def create_variation(
        self,
        *,
        image: FileTypes,
        model: Union[str, ImageModel, None] | NotGiven = NOT_GIVEN,
        n: Optional[int] | NotGiven = NOT_GIVEN,
        response_format: Optional[Literal["url", "b64_json"]] | NotGiven = NOT_GIVEN,
        size: Optional[Literal["256x256", "512x512", "1024x1024"]] | NotGiven = NOT_GIVEN,
        user: str | NotGiven = NOT_GIVEN,
        extra_headers: Headers | None = None,
        extra_query: Query | None = None,
        extra_body: Body | None = None,
        timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
    ) -> ImagesResponse:
        """Creates a variation of a given image; see `Images.create_variation` for parameter details."""
        body = deepcopy_minimal(
            {
                "image": image,
                "model": model,
                "n": n,
                "response_format": response_format,
                "size": size,
                "user": user,
            }
        )
        files = extract_files(cast(Mapping[str, object], body), paths=[["image"]])
        extra_headers = {"Content-Type": "multipart/form-data", **(extra_headers or {})}
        return await self._post(
            "/images/variations",
            body=await async_maybe_transform(body, image_create_variation_params.ImageCreateVariationParams),
            files=files,
            options=make_request_options(
                extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
            ),
            cast_to=ImagesResponse,
        )

    async def edit(
        self,
        *,
        image: Union[FileTypes, List[FileTypes]],
        prompt: str,
        background: Optional[Literal["transparent", "opaque", "auto"]] | NotGiven = NOT_GIVEN,
        input_fidelity: Optional[Literal["high", "low"]] | NotGiven = NOT_GIVEN,
        mask: FileTypes | NotGiven = NOT_GIVEN,
        model: Union[str, ImageModel, None] | NotGiven = NOT_GIVEN,
        n: Optional[int] | NotGiven = NOT_GIVEN,
        output_compression: Optional[int] | NotGiven = NOT_GIVEN,
        output_format: Optional[Literal["png", "jpeg", "webp"]] | NotGiven = NOT_GIVEN,
        partial_images: Optional[int] | NotGiven = NOT_GIVEN,
        quality: Optional[Literal["standard", "low", "medium", "high", "auto"]] | NotGiven = NOT_GIVEN,
        response_format: Optional[Literal["url", "b64_json"]] | NotGiven = NOT_GIVEN,
        size: Optional[Literal["256x256", "512x512", "1024x1024", "1536x1024", "1024x1536", "auto"]] | NotGiven = NOT_GIVEN,
        stream: Optional[Literal[False]] | Literal[True] | NotGiven = NOT_GIVEN,
        user: str | NotGiven = NOT_GIVEN,
        extra_headers: Headers | None = None,
        extra_query: Query | None = None,
        extra_body: Body | None = None,
        timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
    ) -> ImagesResponse | AsyncStream[ImageEditStreamEvent]:
        """Creates an edited or extended image given one or more source images and a prompt; see `Images.edit` for parameter details."""
        body = deepcopy_minimal(
            {
                "image": image,
                "prompt": prompt,
                "background": background,
                "input_fidelity": input_fidelity,
                "mask": mask,
                "model": model,
                "n": n,
                "output_compression": output_compression,
                "output_format": output_format,
                "partial_images": partial_images,
                "quality": quality,
                "response_format": response_format,
                "size": size,
                "stream": stream,
                "user": user,
            }
        )
        files = extract_files(cast(Mapping[str, object], body), paths=[["image"], ["image", "<array>"], ["mask"]])
        extra_headers = {"Content-Type": "multipart/form-data", **(extra_headers or {})}
        return await self._post(
            "/images/edits",
            body=await async_maybe_transform(
                body,
                image_edit_params.ImageEditParamsStreaming if stream else image_edit_params.ImageEditParamsNonStreaming,
            ),
            files=files,
            options=make_request_options(
                extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
            ),
            cast_to=ImagesResponse,
            stream=stream or False,
            stream_cls=AsyncStream[ImageEditStreamEvent],
        )

    async def generate(
        self,
        *,
        prompt: str,
        background: Optional[Literal["transparent", "opaque", "auto"]] | NotGiven = NOT_GIVEN,
        model: Union[str, ImageModel, None] | NotGiven = NOT_GIVEN,
        moderation: Optional[Literal["low", "auto"]] | NotGiven = NOT_GIVEN,
        n: Optional[int] | NotGiven = NOT_GIVEN,
        output_compression: Optional[int] | NotGiven = NOT_GIVEN,
        output_format: Optional[Literal["png", "jpeg", "webp"]] | NotGiven = NOT_GIVEN,
        partial_images: Optional[int] | NotGiven = NOT_GIVEN,
        quality: Optional[Literal["standard", "hd", "low", "medium", "high", "auto"]] | NotGiven = NOT_GIVEN,
        response_format: Optional[Literal["url", "b64_json"]] | NotGiven = NOT_GIVEN,
        size: Optional[Literal["auto", "1024x1024", "1536x1024", "1024x1536", "256x256", "512x512", "1792x1024", "1024x1792"]] | NotGiven = NOT_GIVEN,
        stream: Optional[Literal[False]] | Literal[True] | NotGiven = NOT_GIVEN,
        style: Optional[Literal["vivid", "natural"]] | NotGiven = NOT_GIVEN,
        user: str | NotGiven = NOT_GIVEN,
        extra_headers: Headers | None = None,
        extra_query: Query | None = None,
        extra_body: Body | None = None,
        timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
    ) -> ImagesResponse | AsyncStream[ImageGenStreamEvent]:
        """Creates an image given a prompt; see `Images.generate` for parameter details."""
        return await self._post(
            "/images/generations",
            body=await async_maybe_transform(
                {
                    "prompt": prompt,
                    "background": background,
                    "model": model,
                    "moderation": moderation,
                    "n": n,
                    "output_compression": output_compression,
                    "output_format": output_format,
                    "partial_images": partial_images,
                    "quality": quality,
                    "response_format": response_format,
                    "size": size,
                    "stream": stream,
                    "style": style,
                    "user": user,
                },
                image_generate_params.ImageGenerateParamsStreaming
                if stream
                else image_generate_params.ImageGenerateParamsNonStreaming,
            ),
            options=make_request_options(
                extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
            ),
            cast_to=ImagesResponse,
            stream=stream or False,
            stream_cls=AsyncStream[ImageGenStreamEvent],
        )


class ImagesWithRawResponse:
    def __init__(self, images: Images) -> None:
        self._images = images

        self.create_variation = _legacy_response.to_raw_response_wrapper(images.create_variation)
        self.edit = _legacy_response.to_raw_response_wrapper(images.edit)
        self.generate = _legacy_response.to_raw_response_wrapper(images.generate)


class AsyncImagesWithRawResponse:
    def __init__(self, images: AsyncImages) -> None:
        self._images = images

        self.create_variation = _legacy_response.async_to_raw_response_wrapper(images.create_variation)
        self.edit = _legacy_response.async_to_raw_response_wrapper(images.edit)
        self.generate = _legacy_response.async_to_raw_response_wrapper(images.generate)


class ImagesWithStreamingResponse:
    def __init__(self, images: Images) -> None:
        self._images = images

        self.create_variation = to_streamed_response_wrapper(images.create_variation)
        self.edit = to_streamed_response_wrapper(images.edit)
        self.generate = to_streamed_response_wrapper(images.generate)


class AsyncImagesWithStreamingResponse:
    def __init__(self, images: AsyncImages) -> None:
        self._images = images

        self.create_variation = async_to_streamed_response_wrapper(images.create_variation)
        self.edit = async_to_streamed_response_wrapper(images.edit)
        self.generate = async_to_streamed_response_wrapper(images.generate)