Diffstat (limited to '')
-rw-r--r--  g4f/Provider/ChatgptAi.py  72
-rw-r--r--  g4f/Provider/Geekgpt.py    14
2 files changed, 47 insertions, 39 deletions
diff --git a/g4f/Provider/ChatgptAi.py b/g4f/Provider/ChatgptAi.py
index cf45909c..d5fd5bff 100644
--- a/g4f/Provider/ChatgptAi.py
+++ b/g4f/Provider/ChatgptAi.py
@@ -1,36 +1,34 @@
from __future__ import annotations
-import re
+import re, html, json, string, random
from aiohttp import ClientSession
-from ..typing import Messages
-from .base_provider import AsyncProvider, format_prompt
+from ..typing import Messages, AsyncResult
+from .base_provider import AsyncGeneratorProvider
-class ChatgptAi(AsyncProvider):
- url: str = "https://chatgpt.ai/"
+class ChatgptAi(AsyncGeneratorProvider):
+ url: str = "https://chatgpt.ai"
working = True
supports_gpt_35_turbo = True
- _nonce = None
- _post_id = None
- _bot_id = None
+ _system = None
@classmethod
- async def create_async(
+ async def create_async_generator(
cls,
model: str,
messages: Messages,
proxy: str = None,
**kwargs
- ) -> str:
+ ) -> AsyncResult:
headers = {
"authority" : "chatgpt.ai",
"accept" : "*/*",
- "accept-language" : "en,fr-FR;q=0.9,fr;q=0.8,es-ES;q=0.7,es;q=0.6,en-US;q=0.5,am;q=0.4,de;q=0.3",
+ "accept-language" : "en-US",
"cache-control" : "no-cache",
- "origin" : "https://chatgpt.ai",
+ "origin" : cls.url,
"pragma" : "no-cache",
- "referer" : cls.url,
+ "referer" : f"{cls.url}/",
"sec-ch-ua" : '"Not.A/Brand";v="8", "Chromium";v="114", "Google Chrome";v="114"',
"sec-ch-ua-mobile" : "?0",
"sec-ch-ua-platform" : '"Windows"',
@@ -42,34 +40,40 @@ class ChatgptAi(AsyncProvider):
async with ClientSession(
headers=headers
) as session:
- if not cls._nonce:
+ if not cls._system:
async with session.get(cls.url, proxy=proxy) as response:
response.raise_for_status()
text = await response.text()
- result = re.search(r'data-nonce="(.*?)"', text)
+ result = re.search(r"data-system='(.*?)'", text)
if result:
- cls._nonce = result.group(1)
- result = re.search(r'data-post-id="(.*?)"', text)
- if result:
- cls._post_id = result.group(1)
- result = re.search(r'data-bot-id="(.*?)"', text)
- if result:
- cls._bot_id = result.group(1)
- if not cls._nonce or not cls._post_id or not cls._bot_id:
- raise RuntimeError("Nonce, post-id or bot-id not found")
-
+ cls._system = json.loads(html.unescape(result.group(1)))
+ if not cls._system:
+ raise RuntimeError("System args not found")
+
data = {
- "_wpnonce": cls._nonce,
- "post_id": cls._post_id,
- "url": "https://chatgpt.ai",
- "action": "wpaicg_chat_shortcode_message",
- "message": format_prompt(messages),
- "bot_id": cls._bot_id
+ "botId": cls._system["botId"],
+ "customId": cls._system["customId"],
+ "session": cls._system["sessionId"],
+ "chatId": "".join(random.choices(f"{string.ascii_lowercase}{string.digits}", k=11)),
+ "contextId": cls._system["contextId"],
+ "messages": messages,
+ "newMessage": messages[-1]["content"],
+ "stream": True
}
async with session.post(
- "https://chatgpt.ai/wp-admin/admin-ajax.php",
+ f"{cls.url}/wp-json/mwai-ui/v1/chats/submit",
proxy=proxy,
- data=data
+ json=data
) as response:
response.raise_for_status()
-                return (await response.json())["data"]
\ No newline at end of file
+ async for line in response.content:
+ if line.startswith(b"data: "):
+ try:
+ line = json.loads(line[6:])
+ assert "type" in line
+ except:
+ raise RuntimeError(f"Broken line: {line.decode()}")
+ if line["type"] == "live":
+ yield line["data"]
+ elif line["type"] == "end":
+                            break
\ No newline at end of file
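The ChatgptAi rewrite above drops the nonce-based wp-admin/admin-ajax.php call, which returned one finished string, in favour of the streaming /wp-json/mwai-ui/v1/chats/submit endpoint, so callers now iterate an async generator of text chunks. A minimal consumption sketch, assuming the g4f.Provider.ChatgptAi import path and the OpenAI-style messages list used by the request payload in the diff (both assumptions, not shown here):

import asyncio

from g4f.Provider.ChatgptAi import ChatgptAi  # import path assumed from the file path above

async def main() -> None:
    messages = [{"role": "user", "content": "Hello"}]
    # create_async_generator() yields each "live" SSE chunk as it arrives and
    # stops on the "end" event, instead of returning a single awaited string
    # the way the old create_async() did.
    async for chunk in ChatgptAi.create_async_generator("gpt-3.5-turbo", messages):
        print(chunk, end="", flush=True)

asyncio.run(main())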
diff --git a/g4f/Provider/Geekgpt.py b/g4f/Provider/Geekgpt.py
index 3bbc0e75..3c577cf8 100644
--- a/g4f/Provider/Geekgpt.py
+++ b/g4f/Provider/Geekgpt.py
@@ -14,11 +14,15 @@ class GeekGpt(BaseProvider):
supports_gpt_4 = True
@classmethod
- def create_completion(cls,
- model: str,
- messages: Messages,
- stream: bool, **kwargs) -> CreateResult:
-
+ def create_completion(
+ cls,
+ model: str,
+ messages: Messages,
+ stream: bool,
+ **kwargs
+ ) -> CreateResult:
+ if not model:
+ model = "gpt-3.5-turbo"
json_data = {
'messages': messages,
'model': model,
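The GeekGpt change reflows the create_completion signature and adds a default: an empty model now falls back to "gpt-3.5-turbo" before json_data is built. A small usage sketch under the same assumptions about the package layout (the import path and CreateResult being an iterator of text chunks are assumptions, not shown in this diff):

from g4f.Provider.Geekgpt import GeekGpt  # module name taken from the file path above

messages = [{"role": "user", "content": "Hello"}]
# Passing an empty model now resolves to "gpt-3.5-turbo" inside
# create_completion rather than being forwarded upstream unchanged.
for chunk in GeekGpt.create_completion(model="", messages=messages, stream=True):
    print(chunk, end="", flush=True)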