author     kqlio67 <kqlio67@users.noreply.github.com>  2024-10-20 22:39:21 +0200
committer  kqlio67 <kqlio67@users.noreply.github.com>  2024-10-20 22:39:21 +0200
commit     ac783e505b9f0bc7c459ab4e57aa7bed6458b949 (patch)
tree       370e94f80cae6201c81c0e4ed87eff6b05dec7e5
parent     update g4f/models.py g4f/Provider/Ai4Chat.py g4f/Provider/Chatgpt4Online.py (diff)
Diffstat (limited to '')
-rw-r--r--  g4f/Provider/nexra/NexraBlackbox.py  132
-rw-r--r--  g4f/models.py                          5
2 files changed, 69 insertions, 68 deletions
diff --git a/g4f/Provider/nexra/NexraBlackbox.py b/g4f/Provider/nexra/NexraBlackbox.py
index e09774df..87eea8e2 100644
--- a/g4f/Provider/nexra/NexraBlackbox.py
+++ b/g4f/Provider/nexra/NexraBlackbox.py
@@ -1,20 +1,22 @@
from __future__ import annotations
import json
-from aiohttp import ClientSession, ClientTimeout, ClientError
+import requests
-from ...typing import AsyncResult, Messages
-from ..base_provider import AsyncGeneratorProvider, ProviderModelMixin
+from ...typing import CreateResult, Messages
+from ..base_provider import ProviderModelMixin, AbstractProvider
+from ..helper import format_prompt
-class NexraBlackbox(AsyncGeneratorProvider, ProviderModelMixin):
- label = "Nexra Blackbox"
- url = "https://nexra.aryahcr.cc/documentation/blackbox/en"
- api_endpoint = "https://nexra.aryahcr.cc/api/chat/complements"
- working = False
+class NexraBlackbox(AbstractProvider, ProviderModelMixin):
+ url = "https://nexra.aryahcr.cc/api/chat/complements"
+ working = True
supports_stream = True
- default_model = 'blackbox'
- models = [default_model]
+ default_model = "blackbox"
+
+ models = [
+ 'blackbox',
+ ]
model_aliases = {
"blackboxai": "blackbox",
@@ -28,74 +30,72 @@ class NexraBlackbox(AsyncGeneratorProvider, ProviderModelMixin):
return cls.model_aliases[model]
else:
return cls.default_model
-
+
@classmethod
- async def create_async_generator(
+ def create_completion(
cls,
model: str,
messages: Messages,
- proxy: str = None,
- stream: bool = False,
- markdown: bool = False,
- websearch: bool = False,
+ stream: bool,
**kwargs
- ) -> AsyncResult:
- model = cls.get_model(model)
+ ) -> CreateResult:
+ model = model or cls.default_model
headers = {
- "Content-Type": "application/json"
+ 'Content-Type': 'application/json'
}
- payload = {
- "messages": [{"role": msg["role"], "content": msg["content"]} for msg in messages],
- "websearch": websearch,
+ data = {
+ "messages": [
+ {
+ "role": "user",
+ "content": format_prompt(messages)
+ }
+ ],
+ "websearch": False,
"stream": stream,
- "markdown": markdown,
+ "markdown": False,
"model": model
}
-
- timeout = ClientTimeout(total=600) # 10 minutes timeout
- try:
- async with ClientSession(headers=headers, timeout=timeout) as session:
- async with session.post(cls.api_endpoint, json=payload, proxy=proxy) as response:
- if response.status != 200:
- error_text = await response.text()
- raise Exception(f"Error: {response.status} - {error_text}")
-
- content = await response.text()
-
- # Split content by Record Separator character
- parts = content.split('\x1e')
- full_message = ""
- links = []
-
- for part in parts:
- if part:
- try:
- json_response = json.loads(part)
-
- if json_response.get("message"):
- full_message = json_response["message"] # Overwrite instead of append
-
- if isinstance(json_response.get("search"), list):
- links = json_response["search"] # Overwrite instead of extend
-
- if json_response.get("finish", False):
- break
-
- except json.JSONDecodeError:
- pass
-
- if full_message:
- yield full_message.strip()
+ response = requests.post(cls.url, headers=headers, json=data, stream=stream)
+
+ if stream:
+ return cls.process_streaming_response(response)
+ else:
+ return cls.process_non_streaming_response(response)
- if payload["websearch"] and links:
- yield "\n\n**Source:**"
- for i, link in enumerate(links, start=1):
- yield f"\n{i}. {link['title']}: {link['link']}"
+ @classmethod
+ def process_non_streaming_response(cls, response):
+ if response.status_code == 200:
+ try:
+ full_response = ""
+ for line in response.iter_lines(decode_unicode=True):
+ if line:
+ data = json.loads(line)
+ if data.get('finish'):
+ break
+ message = data.get('message', '')
+ if message:
+ full_response = message
+ return full_response
+ except json.JSONDecodeError:
+ return "Error: Unable to decode JSON response"
+ else:
+ return f"Error: {response.status_code}"
- except ClientError:
- raise
- except Exception:
- raise
+ @classmethod
+ def process_streaming_response(cls, response):
+ previous_message = ""
+ for line in response.iter_lines(decode_unicode=True):
+ if line:
+ try:
+ data = json.loads(line)
+ if data.get('finish'):
+ break
+ message = data.get('message', '')
+ if message and message != previous_message:
+ yield message[len(previous_message):]
+ previous_message = message
+ except json.JSONDecodeError:
+ pass
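For reference, the process_streaming_response handler added above yields only the new suffix of each cumulative "message" field it receives. A minimal standalone sketch of that delta logic, using hypothetical sample lines in place of a live Nexra response:

    import json

    # Hypothetical sample lines shaped like the Nexra stream: each line carries
    # the cumulative "message" so far, and the final line sets "finish": true.
    sample_lines = [
        '{"message": "Hello", "finish": false}',
        '{"message": "Hello, world!", "finish": false}',
        '{"finish": true}',
    ]

    previous_message = ""
    for line in sample_lines:
        data = json.loads(line)
        if data.get("finish"):
            break
        message = data.get("message", "")
        if message and message != previous_message:
            # Emit only the newly appended suffix, as the provider's
            # streaming handler does.
            print(message[len(previous_message):], end="")
            previous_message = message
    print()

Running the sketch prints "Hello, world!" in two chunks ("Hello" and ", world!"), mirroring how the provider turns cumulative snapshots into incremental output.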
diff --git a/g4f/models.py b/g4f/models.py
index d7800c76..ecea56bd 100644
--- a/g4f/models.py
+++ b/g4f/models.py
@@ -443,7 +443,8 @@ reka_core = Model(
blackboxai = Model(
name = 'blackboxai',
base_provider = 'Blackbox AI',
- best_provider = IterListProvider([Blackbox, NexraBlackbox])
+ #best_provider = IterListProvider([Blackbox, NexraBlackbox])
+ best_provider = IterListProvider([NexraBlackbox])
)
blackboxai_pro = Model(
@@ -766,7 +767,7 @@ flux = Model(
flux_pro = Model(
name = 'flux-pro',
base_provider = 'Flux AI',
- best_provider = IterListProvider([NexraFluxPro, AmigoChat])
+ best_provider = IterListProvider([AmigoChat, NexraFluxPro])
)
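For reference, a minimal usage sketch of the reworked synchronous provider, assuming it is re-exported as g4f.Provider.NexraBlackbox and called through the standard g4f.ChatCompletion.create entry point; with stream=True the call returns the generator produced by process_streaming_response:

    import g4f
    from g4f.Provider import NexraBlackbox  # assumes the provider is re-exported here

    # Stream a completion through the NexraBlackbox provider.
    response = g4f.ChatCompletion.create(
        model="blackbox",
        provider=NexraBlackbox,
        messages=[{"role": "user", "content": "Explain what a context manager is."}],
        stream=True,
    )

    # Print chunks as they arrive.
    for chunk in response:
        print(chunk, end="", flush=True)
    print()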