path: root/g4f/Provider/FreeChatgpt.py
author    H Lohaus <hlohaus@users.noreply.github.com>    2024-01-14 15:32:51 +0100
committer GitHub <noreply@github.com>    2024-01-14 15:32:51 +0100
commit    1ca80ed48b55d6462b4bd445e66d4f7de7442c2b (patch)
tree      05a94b53b83461b8249de965e093b4fd3722e2d1 /g4f/Provider/FreeChatgpt.py
parent    Merge pull request #1466 from hlohaus/upp (diff)
parent    Change doctypes style to Google (diff)
Diffstat (limited to 'g4f/Provider/FreeChatgpt.py')
-rw-r--r--    g4f/Provider/FreeChatgpt.py    15
1 file changed, 10 insertions(+), 5 deletions(-)
diff --git a/g4f/Provider/FreeChatgpt.py b/g4f/Provider/FreeChatgpt.py
index 75514118..0f993690 100644
--- a/g4f/Provider/FreeChatgpt.py
+++ b/g4f/Provider/FreeChatgpt.py
@@ -1,16 +1,20 @@
from __future__ import annotations
-import json
+import json, random
from aiohttp import ClientSession
from ..typing import AsyncResult, Messages
from .base_provider import AsyncGeneratorProvider
-
models = {
- "claude-v2": "claude-2.0",
- "gemini-pro": "google-gemini-pro"
+ "claude-v2": "claude-2.0",
+ "claude-v2.1":"claude-2.1",
+ "gemini-pro": "google-gemini-pro"
}
+urls = [
+ "https://free.chatgpt.org.uk",
+ "https://ai.chatgpt.org.uk"
+]
class FreeChatgpt(AsyncGeneratorProvider):
url = "https://free.chatgpt.org.uk"
@@ -31,6 +35,7 @@ class FreeChatgpt(AsyncGeneratorProvider):
model = models[model]
elif not model:
model = "gpt-3.5-turbo"
+ url = random.choice(urls)
headers = {
"Accept": "application/json, text/event-stream",
"Content-Type":"application/json",
@@ -55,7 +60,7 @@ class FreeChatgpt(AsyncGeneratorProvider):
"top_p":1,
**kwargs
}
- async with session.post(f'{cls.url}/api/openai/v1/chat/completions', json=data, proxy=proxy) as response:
+ async with session.post(f'{url}/api/openai/v1/chat/completions', json=data, proxy=proxy) as response:
response.raise_for_status()
started = False
async for line in response.content:
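
The commit above extends the model alias table (adding claude-v2.1) and rotates each request across two mirror hosts instead of always using cls.url. Below is a minimal, self-contained sketch of that same flow for illustration only; it assumes the endpoint streams OpenAI-style SSE "data:" lines, and the request parameters beyond those visible in the diff (temperature, headers) are placeholders rather than the provider's exact values.

import asyncio
import json
import random
from aiohttp import ClientSession

# Mirrors introduced by this commit; one is picked at random per request.
URLS = [
    "https://free.chatgpt.org.uk",
    "https://ai.chatgpt.org.uk",
]

# Alias table matching the updated `models` dict.
MODELS = {
    "claude-v2": "claude-2.0",
    "claude-v2.1": "claude-2.1",
    "gemini-pro": "google-gemini-pro",
}

async def chat(messages, model="gpt-3.5-turbo", proxy=None):
    model = MODELS.get(model, model)   # resolve alias, otherwise pass the name through
    url = random.choice(URLS)          # spread requests across the two mirrors
    headers = {
        "Accept": "application/json, text/event-stream",
        "Content-Type": "application/json",
    }
    data = {
        "messages": messages,
        "stream": True,
        "model": model,
        "temperature": 0.5,  # placeholder value, not taken from the diff
        "top_p": 1,
    }
    async with ClientSession(headers=headers) as session:
        async with session.post(f"{url}/api/openai/v1/chat/completions",
                                json=data, proxy=proxy) as response:
            response.raise_for_status()
            async for line in response.content:
                # Assumed SSE framing: 'data: {...}' chunks with OpenAI-style deltas.
                if line.startswith(b"data: ") and b"[DONE]" not in line:
                    chunk = json.loads(line[6:])
                    choices = chunk.get("choices") or [{}]
                    delta = choices[0].get("delta", {})
                    if "content" in delta:
                        yield delta["content"]

# Example usage:
#   async def main():
#       async for token in chat([{"role": "user", "content": "Hello"}]):
#           print(token, end="")
#   asyncio.run(main())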