path: root/g4f/Provider/GptChatly.py
author     Heiner Lohaus <hlohaus@users.noreply.github.com>    2023-12-02 05:40:07 +0100
committer  Heiner Lohaus <hlohaus@users.noreply.github.com>    2023-12-02 05:40:07 +0100
commit     2157ccbcdb5d781d389e24db332d2fb78b1159a9 (patch)
tree       0ba2ad0cb7fffd5b4882e2dad25bea1b6f984e33 /g4f/Provider/GptChatly.py
parent     Add proxy support / fix streaming (diff)
Diffstat (limited to 'g4f/Provider/GptChatly.py')
-rw-r--r--  g4f/Provider/GptChatly.py  49
1 files changed, 14 insertions, 35 deletions
diff --git a/g4f/Provider/GptChatly.py b/g4f/Provider/GptChatly.py
index dcedfe1b..d98c2af4 100644
--- a/g4f/Provider/GptChatly.py
+++ b/g4f/Provider/GptChatly.py
@@ -1,9 +1,8 @@
 from __future__ import annotations
 
-from ..requests import StreamSession
+from ..requests import Session, get_session_from_browser
 from ..typing import Messages
 from .base_provider import AsyncProvider
-from .helper import get_cookies
 
 
 class GptChatly(AsyncProvider):
@@ -18,40 +17,20 @@ class GptChatly(AsyncProvider):
         cls,
         model: str,
         messages: Messages,
-        proxy: str = None, cookies: dict = None, **kwargs) -> str:
-
-        cookies = get_cookies('gptchatly.com') if not cookies else cookies
-        if not cookies:
-            raise RuntimeError(
-                "g4f.provider.GptChatly requires cookies, [refresh https://gptchatly.com on chrome]"
-            )
-
+        proxy: str = None,
+        timeout: int = 120,
+        session: Session = None,
+        **kwargs
+    ) -> str:
+        if not session:
+            session = get_session_from_browser(cls.url, proxy=proxy, timeout=timeout)
         if model.startswith("gpt-4"):
             chat_url = f"{cls.url}/fetch-gpt4-response"
         else:
-            chat_url = f"{cls.url}/fetch-response"
-
-        headers = {
-            'authority': 'gptchatly.com',
-            'accept': '*/*',
-            'accept-language': 'en,fr-FR;q=0.9,fr;q=0.8,es-ES;q=0.7,es;q=0.6,en-US;q=0.5,am;q=0.4,de;q=0.3',
-            'content-type': 'application/json',
-            'origin': 'https://gptchatly.com',
-            'referer': 'https://gptchatly.com/',
-            'sec-ch-ua': '"Chromium";v="118", "Google Chrome";v="118", "Not=A?Brand";v="99"',
-            'sec-ch-ua-mobile': '?0',
-            'sec-ch-ua-platform': '"macOS"',
-            'sec-fetch-dest': 'empty',
-            'sec-fetch-mode': 'cors',
-            'sec-fetch-site': 'same-origin',
-            'user-agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/118.0.0.0 Safari/537.36',
+            chat_url = f"{cls.url}/felch-response"
+        data = {
+            "past_conversations": messages
         }
-
-        async with StreamSession(headers=headers,
-                proxies={"https": proxy}, cookies=cookies, impersonate='chrome110') as session:
-            data = {
-                "past_conversations": messages
-            }
-            async with session.post(chat_url, json=data) as response:
-                response.raise_for_status()
-                return (await response.json())["chatGPTResponse"]
\ No newline at end of file
+        response = session.post(chat_url, json=data)
+        response.raise_for_status()
+        return response.json()["chatGPTResponse"]
\ No newline at end of file
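
The change drops the hard-coded Chrome headers and the cookie requirement in favour of a session obtained from get_session_from_browser, and create_async now takes proxy, timeout and an optional pre-built session. A minimal usage sketch of the new signature follows; the import path, model name and message format are assumptions based on the usual g4f conventions, not part of this diff.

# Minimal usage sketch (assumed import path, model name and chat-message format).
import asyncio

from g4f.Provider.GptChatly import GptChatly

async def main():
    # Messages use the usual role/content dictionaries.
    messages = [{"role": "user", "content": "Hello, how are you?"}]
    # No explicit session is passed, so the provider builds one itself via
    # get_session_from_browser(cls.url, proxy=proxy, timeout=timeout).
    answer = await GptChatly.create_async(
        "gpt-3.5-turbo",
        messages,
        timeout=120,  # forwarded to the browser-derived session
    )
    print(answer)

asyncio.run(main())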