path: root/g4f/Provider/nexra/NexraSD21.py
author     Tekky <98614666+xtekky@users.noreply.github.com>  2024-10-15 11:51:53 +0200
committer  GitHub <noreply@github.com>                       2024-10-15 11:51:53 +0200
commit     5ed3467d07181e876d957984c16782d687abd3b5 (patch)
tree       23bd0fd3481d81fca70ac3c7842cb7ffa8f6497f /g4f/Provider/nexra/NexraSD21.py
parent     Merge pull request #2268 from yjg30737/patch-1 (diff)
parent     Updated(docs/client.md) (diff)
Diffstat (limited to 'g4f/Provider/nexra/NexraSD21.py')
-rw-r--r--  g4f/Provider/nexra/NexraSD21.py  75
1 file changed, 75 insertions(+), 0 deletions(-)
diff --git a/g4f/Provider/nexra/NexraSD21.py b/g4f/Provider/nexra/NexraSD21.py
new file mode 100644
index 00000000..46cd6611
--- /dev/null
+++ b/g4f/Provider/nexra/NexraSD21.py
@@ -0,0 +1,75 @@
+from __future__ import annotations
+
+import json
+from aiohttp import ClientSession
+from ...image import ImageResponse
+
+from ...typing import AsyncResult, Messages
+from ..base_provider import AsyncGeneratorProvider, ProviderModelMixin
+
+
+class NexraSD21(AsyncGeneratorProvider, ProviderModelMixin):
+ label = "Nexra Stable Diffusion 2.1"
+ url = "https://nexra.aryahcr.cc/documentation/stable-diffusion/en"
+ api_endpoint = "https://nexra.aryahcr.cc/api/image/complements"
+ working = False
+
+ default_model = 'stablediffusion-2.1'
+ models = [default_model]
+
+ model_aliases = {
+ "sd-2.1": "stablediffusion-2.1",
+ }
+
+ @classmethod
+ def get_model(cls, model: str) -> str:
+ if model in cls.models:
+ return model
+ elif model in cls.model_aliases:
+ return cls.model_aliases[model]
+ else:
+ return cls.default_model
+
+ @classmethod
+ async def create_async_generator(
+ cls,
+ model: str,
+ messages: Messages,
+ proxy: str = None,
+        response: str = "url",  # desired image return format: "url" or "base64"
+ **kwargs
+ ) -> AsyncResult:
+ model = cls.get_model(model)
+
+ headers = {
+ "Content-Type": "application/json",
+ }
+ async with ClientSession(headers=headers) as session:
+            # Use the content of the last message as the text prompt
+            data = {
+                "prompt": messages[-1]["content"],
+ "model": model,
+ "response": response,
+ "data": {
+ "prompt_negative": "",
+ "guidance_scale": 9
+ }
+ }
+            async with session.post(cls.api_endpoint, json=data, proxy=proxy) as api_response:
+                api_response.raise_for_status()
+                text_response = await api_response.text()
+
+                # The API sometimes pads the JSON payload with leading/trailing underscores; strip them
+                cleaned_response = text_response.strip('_')
+
+ if not cleaned_response.strip():
+ raise ValueError("Received an empty response from the server.")
+
+                try:
+                    json_response = json.loads(cleaned_response)
+                except json.JSONDecodeError:
+                    raise ValueError("Unable to decode JSON from the received text response.")
+
+                images = json_response.get("images") or []
+                if not images:
+                    raise ValueError("The API response did not contain any image URLs.")
+
+                # Wrap the first image URL in an ImageResponse and yield it
+                yield ImageResponse(images=images[0], alt="Generated Image")
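
Usage sketch (not part of the commit; a minimal, hypothetical illustration only): one way to drive this provider class directly with asyncio, assuming the g4f package containing this module is importable. The commit marks the provider as working = False, so the request may not succeed; the snippet only shows the call shape and the standard g4f message format.

import asyncio

from g4f.Provider.nexra.NexraSD21 import NexraSD21

async def main():
    # Standard g4f-style message list; the last message's content becomes the image prompt
    messages = [{"role": "user", "content": "a watercolor painting of a lighthouse at dusk"}]
    # "sd-2.1" resolves to "stablediffusion-2.1" via model_aliases in get_model()
    async for item in NexraSD21.create_async_generator(model="sd-2.1", messages=messages):
        # Each yielded item is an ImageResponse wrapping the returned image URL
        print(item)

asyncio.run(main())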