"""
title: FLUX.1-schnell
author: jikkata_plus_claude_4_sonnet
description: Image generation with FLUX.1-schnell using Hugging Face's ZeroGPU Space (black-forest-labs/FLUX.1-schnell).
requirements: gradio_client
version: 0.2.0
licence: MIT
"""
import asyncio
import base64
import mimetypes
import traceback

from gradio_client import Client
from pydantic import BaseModel, Field


class Tools:
    class Valves(BaseModel):
        hf_token: str = Field(
            default="",
            description="Hugging Face Access Token",
        )

    def __init__(self):
        self.valves = self.Valves()

    async def generate_images(
        self,
        prompt_in_english: str,
        seed: float,
        randomize_seed: bool,
        width: float,
        height: float,
        num_inference_steps: float,
        __event_emitter__=None,
    ):
        """
        Generate an image with FLUX.1-schnell from a text prompt and display it in the chat.

        :param prompt_in_english: Text prompt describing the image, in English.
        :param seed: Random seed for reproducible generation.
        :param randomize_seed: Whether to ignore the seed and pick a random one instead.
        :param width: Image width in pixels.
        :param height: Image height in pixels.
        :param num_inference_steps: Number of diffusion steps (FLUX.1-schnell is tuned for only a few steps).
        """
        try:
            await __event_emitter__(
                {
                    "type": "status",
                    "data": {
                        "description": "Generating an image",
                        "done": False,
                    },
                }
            )
            # Actually generate the image
            image_source = await predict_async(
                prompt_in_english,
                seed,
                randomize_seed,
                width,
                height,
                num_inference_steps,
                hf_token=self.valves.hf_token,
            )
            print(f"Generated image source: {image_source}")
            await __event_emitter__(
                {
                    "type": "status",
                    "data": {
                        "description": "Processing image",
                        "done": False,
                    },
                }
            )
            # Read the generated image file and convert it to base64
            with open(image_source, "rb") as f:
                image_data = f.read()
            image_base64 = base64.b64encode(image_data).decode("utf-8")
            # Create a base64 data URL - this works perfectly with a Cloudflare
            # tunnel; the MIME type is guessed from the downloaded file's extension.
            mime_type = mimetypes.guess_type(image_source)[0] or "image/webp"
            markdown_image = f"![Generated Image](data:{mime_type};base64,{image_base64})"
            await __event_emitter__(
                {
                    "type": "status",
                    "data": {
                        "description": "Generated an image",
                        "done": True,
                    },
                }
            )
            await __event_emitter__(
                {
                    "type": "message",
                    "data": {"content": markdown_image},
                }
            )
            return "Image has been generated and displayed above."
        except Exception as e:
            print(f"Error in generate_images: {e}")
            print(f"Traceback: {traceback.format_exc()}")
            await __event_emitter__(
                {
                    "type": "status",
                    "data": {"description": f"An error occurred: {e}", "done": True},
                }
            )
            return (
                f"Tell the user: \n```unhandled error\n{traceback.format_exc()}\n```\n"
            )


def predict_sync(
    prompt_in_english,
    seed,
    randomize_seed,
    width,
    height,
    num_inference_steps,
    hf_token,
):
    """Call the FLUX.1-schnell Space synchronously and return the local path of the generated image."""
    client = Client("black-forest-labs/FLUX.1-schnell", hf_token=hf_token)
    result = client.predict(
        prompt_in_english,
        seed,
        randomize_seed,
        width,
        height,
        num_inference_steps,
        api_name="/infer",
    )
    # result[0] is the generated image; the endpoint also returns the seed that was used.
    return result[0]


async def predict_async(
    prompt_in_english,
    seed,
    randomize_seed,
    width,
    height,
    num_inference_steps,
    *,
    hf_token,
):
    """Run the blocking gradio_client call in a worker thread so the event loop stays responsive."""
    loop = asyncio.get_running_loop()
    image_source = await loop.run_in_executor(
        None,
        predict_sync,
        prompt_in_english,
        seed,
        randomize_seed,
        width,
        height,
        num_inference_steps,
        hf_token,
    )
    return image_source
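

if __name__ == "__main__":
    # Minimal standalone sketch, not part of the Open WebUI tool itself: it
    # exercises predict_sync directly so the Space call can be tested outside
    # the chat UI. The prompt, size, and step count below are illustrative
    # values, and HF_TOKEN is a placeholder environment variable you would set
    # yourself.
    import os

    image_path = predict_sync(
        "A cat holding a sign that says hello world",  # prompt_in_english
        0,  # seed (ignored when randomize_seed is True)
        True,  # randomize_seed
        1024,  # width
        1024,  # height
        4,  # num_inference_steps
        os.environ.get("HF_TOKEN", ""),  # hf_token
    )
    print(f"Generated image saved at: {image_path}")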