summaryrefslogtreecommitdiffstats
path: root/g4f/Provider/ChatForAi.py
diff options
context:
space:
mode:
author: Heiner Lohaus <heiner@lohaus.eu> 2023-10-12 21:02:51 +0200
committer: Heiner Lohaus <heiner@lohaus.eu> 2023-10-12 21:02:51 +0200
commit: 5048f01e26f8e542f7e9bf8aeb7ed55dd521f8fe (patch)
tree: 4cdec5a40b918ae111fa552d62ddcc6aba94642c /g4f/Provider/ChatForAi.py
parent: Add ChatgptLogin, ChatgptFree and GptChatly Provider (diff)
downloadgpt4free-5048f01e26f8e542f7e9bf8aeb7ed55dd521f8fe.tar
gpt4free-5048f01e26f8e542f7e9bf8aeb7ed55dd521f8fe.tar.gz
gpt4free-5048f01e26f8e542f7e9bf8aeb7ed55dd521f8fe.tar.bz2
gpt4free-5048f01e26f8e542f7e9bf8aeb7ed55dd521f8fe.tar.lz
gpt4free-5048f01e26f8e542f7e9bf8aeb7ed55dd521f8fe.tar.xz
gpt4free-5048f01e26f8e542f7e9bf8aeb7ed55dd521f8fe.tar.zst
gpt4free-5048f01e26f8e542f7e9bf8aeb7ed55dd521f8fe.zip
Diffstat (limited to 'g4f/Provider/ChatForAi.py')
-rw-r--r--g4f/Provider/ChatForAi.py54
1 file changed, 0 insertions, 54 deletions
diff --git a/g4f/Provider/ChatForAi.py b/g4f/Provider/ChatForAi.py
deleted file mode 100644
index f2fe0335..00000000
--- a/g4f/Provider/ChatForAi.py
+++ /dev/null
@@ -1,54 +0,0 @@
-from __future__ import annotations
-
-from ..typing import AsyncResult, Messages
-from ..requests import StreamSession
-from .base_provider import AsyncGeneratorProvider
-
-
class ChatForAi(AsyncGeneratorProvider):
    """Async streaming provider that proxies chat completions through chatforai.com.

    Sends the conversation to the site's OpenAI-compatible endpoint and
    streams the raw response body back as decoded text chunks.
    """

    url = "https://chatforai.com"
    supports_gpt_35_turbo = True
    working = True

    @classmethod
    async def create_async_generator(
        cls,
        model: str,
        messages: Messages,
        proxy: str = None,
        timeout: int = 120,
        **kwargs
    ) -> AsyncResult:
        """Yield decoded response chunks for *messages*.

        Args:
            model: OpenAI model name; falls back to "gpt-3.5-turbo" when falsy.
            messages: Full conversation history; the last entry's content is
                also sent separately as the active prompt.
            proxy: Optional HTTPS proxy URL.
            timeout: Request timeout in seconds.
            **kwargs: Extra entries merged into the request's global settings
                (e.g. temperature overrides).
        """
        settings = {
            "baseUrl": "https://api.openai.com",
            "model": model if model else "gpt-3.5-turbo",
            "messageHistorySize": 5,
            "temperature": 0.7,
            "top_p": 1,
            **kwargs,
        }
        payload = {
            "conversationId": "temp",
            "conversationType": "chat_continuous",
            "botId": "chat_continuous",
            "globalSettings": settings,
            "botSettings": {},
            # The endpoint treats the newest message as the active prompt.
            "prompt": messages[-1]["content"],
            "messages": messages,
        }
        async with StreamSession(
            impersonate="chrome107",
            proxies={"https": proxy},
            timeout=timeout,
        ) as session:
            async with session.post(f"{cls.url}/api/handle/provider-openai", json=payload) as response:
                response.raise_for_status()
                async for chunk in response.iter_content():
                    yield chunk.decode()

    @classmethod
    @property
    def params(cls):
        """Human-readable summary of the keyword parameters this provider accepts.

        NOTE(review): stacking @classmethod on @property was deprecated in
        Python 3.11 and removed in 3.13 — verify target interpreter version.
        """
        supported = [
            ("model", "str"),
            ("messages", "list[dict[str, str]]"),
            ("stream", "bool"),
        ]
        described = ", ".join(f"{name}: {kind}" for name, kind in supported)
        return f"g4f.provider.{cls.__name__} supports: ({described})"