diff options
author | Tekky <98614666+xtekky@users.noreply.github.com> | 2023-10-08 14:21:21 +0200 |
---|---|---|
committer | GitHub <noreply@github.com> | 2023-10-08 14:21:21 +0200 |
commit | d804b20694c8ffc88355adee610d1424a1df1263 (patch) | |
tree | 9750cb102284d9445d60c539f4e6d3586603e917 /g4f/Provider/ChatgptX.py | |
parent | ~ | `v-0.1.5.5` (diff) | |
parent | Add Messages and AsyncResult typing (diff) | |
download | gpt4free-d804b20694c8ffc88355adee610d1424a1df1263.tar gpt4free-d804b20694c8ffc88355adee610d1424a1df1263.tar.gz gpt4free-d804b20694c8ffc88355adee610d1424a1df1263.tar.bz2 gpt4free-d804b20694c8ffc88355adee610d1424a1df1263.tar.lz gpt4free-d804b20694c8ffc88355adee610d1424a1df1263.tar.xz gpt4free-d804b20694c8ffc88355adee610d1424a1df1263.tar.zst gpt4free-d804b20694c8ffc88355adee610d1424a1df1263.zip |
Diffstat (limited to 'g4f/Provider/ChatgptX.py')
-rw-r--r-- | g4f/Provider/ChatgptX.py | 49 |
1 file changed, 38 insertions, 11 deletions
diff --git a/g4f/Provider/ChatgptX.py b/g4f/Provider/ChatgptX.py index 2d932af2..2944fb26 100644 --- a/g4f/Provider/ChatgptX.py +++ b/g4f/Provider/ChatgptX.py @@ -1,25 +1,26 @@ from __future__ import annotations import re +import json from aiohttp import ClientSession - -from .base_provider import AsyncProvider +from ..typing import AsyncResult, Messages +from .base_provider import AsyncGeneratorProvider from .helper import format_prompt -class ChatgptX(AsyncProvider): +class ChatgptX(AsyncGeneratorProvider): url = "https://chatgptx.de" supports_gpt_35_turbo = True working = True @classmethod - async def create_async( + async def create_async_generator( cls, model: str, - messages: list[dict[str, str]], + messages: Messages, **kwargs - ) -> str: + ) -> AsyncResult: headers = { 'accept-language': 'de-DE,de;q=0.9,en-DE;q=0.8,en;q=0.7,en-US', 'sec-ch-ua': '"Google Chrome";v="117", "Not;A=Brand";v="8", "Chromium";v="117"', @@ -63,8 +64,34 @@ class ChatgptX(AsyncProvider): } async with session.post(cls.url + '/sendchat', data=data, headers=headers) as response: response.raise_for_status() - data = await response.json() - if "message" in data: - return data["message"] - elif "messages" in data: - raise RuntimeError(f'Response: {data["messages"]}')
\ No newline at end of file + chat = await response.json() + if "response" not in chat or not chat["response"]: + raise RuntimeError(f'Response: {chat}') + headers = { + 'authority': 'chatgptx.de', + 'accept': 'text/event-stream', + 'referer': f'{cls.url}/', + 'x-csrf-token': csrf_token, + 'x-requested-with': 'XMLHttpRequest' + } + data = { + "user_id": user_id, + "chats_id": chat_id, + "prompt": format_prompt(messages), + "current_model": "gpt3", + "conversions_id": chat["conversions_id"], + "ass_conversions_id": chat["ass_conversions_id"], + } + async with session.get(f'{cls.url}/chats_stream', params=data, headers=headers) as response: + response.raise_for_status() + async for line in response.content: + if line.startswith(b"data: "): + row = line[6:-1] + if row == b"[DONE]": + break + try: + content = json.loads(row)["choices"][0]["delta"].get("content") + except: + raise RuntimeError(f"Broken line: {line.decode()}") + if content: + yield content
\ No newline at end of file |