path: root/g4f/Provider/Vercel.py
author     ⲘrṨhส∂ow <71973368+MrShadowDev@users.noreply.github.com>    2023-10-23 09:46:25 +0200
committer  GitHub <noreply@github.com>    2023-10-23 09:46:25 +0200
commit     3982f39424ea037aca1086d45c6f657b4bfc457c (patch)
tree       987290c5dc5822cb0197e789df68488536b1637c /g4f/Provider/Vercel.py
parent     ~ | g4f `v-0.1.7.5` (diff)
Diffstat (limited to '')
-rw-r--r--  g4f/Provider/Vercel.py  66
1 file changed, 30 insertions, 36 deletions
diff --git a/g4f/Provider/Vercel.py b/g4f/Provider/Vercel.py
index 7c3b8c55..eb5aa20a 100644
--- a/g4f/Provider/Vercel.py
+++ b/g4f/Provider/Vercel.py
@@ -24,30 +24,27 @@ class Vercel(BaseProvider):
if not model:
model = "gpt-3.5-turbo"
-
+
elif model not in model_info:
raise ValueError(f"Vercel does not support {model}")
-
+
headers = {
- 'authority' : 'sdk.vercel.ai',
- 'accept' : '*/*',
- 'accept-language' : 'en,fr-FR;q=0.9,fr;q=0.8,es-ES;q=0.7,es;q=0.6,en-US;q=0.5,am;q=0.4,de;q=0.3',
- 'cache-control' : 'no-cache',
- 'content-type' : 'application/json',
- 'custom-encoding' : get_anti_bot_token(),
- 'origin' : 'https://sdk.vercel.ai',
- 'pragma' : 'no-cache',
- 'referer' : 'https://sdk.vercel.ai/',
- 'sec-ch-ua' : '"Google Chrome";v="117", "Not;A=Brand";v="8", "Chromium";v="117"',
- 'sec-ch-ua-mobile' : '?0',
+ 'authority': 'sdk.vercel.ai',
+ 'accept': '*/*',
+ 'accept-language': 'en,fr-FR;q=0.9,fr;q=0.8,es-ES;q=0.7,es;q=0.6,en-US;q=0.5,am;q=0.4,de;q=0.3',
+ 'cache-control': 'no-cache',
+ 'content-type': 'application/json',
+ 'custom-encoding': get_anti_bot_token(),
+ 'origin': 'https://sdk.vercel.ai',
+ 'pragma': 'no-cache',
+ 'referer': 'https://sdk.vercel.ai/',
+ 'sec-ch-ua': '"Google Chrome";v="117", "Not;A=Brand";v="8", "Chromium";v="117"',
+ 'sec-ch-ua-mobile': '?0',
'sec-ch-ua-platform': '"macOS"',
- 'sec-fetch-dest' : 'empty',
- 'sec-fetch-mode' : 'cors',
- 'sec-fetch-site' : 'same-origin',
- 'user-agent' : 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/117.0.%s.%s Safari/537.36' % (
- random.randint(99, 999),
- random.randint(99, 999)
- )
+ 'sec-fetch-dest': 'empty',
+ 'sec-fetch-mode': 'cors',
+ 'sec-fetch-site': 'same-origin',
+ 'user-agent': f'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/117.0.{random.randint(99, 999)}.{random.randint(99, 999)} Safari/537.36',
}
json_data = {
@@ -60,7 +57,7 @@ class Vercel(BaseProvider):
}
max_retries = kwargs.get('max_retries', 20)
- for i in range(max_retries):
+ for _ in range(max_retries):
response = requests.post('https://sdk.vercel.ai/api/generate',
headers=headers, json=json_data, stream=True, proxies={"https": proxy})
try:
@@ -74,22 +71,19 @@ class Vercel(BaseProvider):
def get_anti_bot_token() -> str:
headers = {
- 'authority' : 'sdk.vercel.ai',
- 'accept' : '*/*',
- 'accept-language' : 'en,fr-FR;q=0.9,fr;q=0.8,es-ES;q=0.7,es;q=0.6,en-US;q=0.5,am;q=0.4,de;q=0.3',
- 'cache-control' : 'no-cache',
- 'pragma' : 'no-cache',
- 'referer' : 'https://sdk.vercel.ai/',
- 'sec-ch-ua' : '"Google Chrome";v="117", "Not;A=Brand";v="8", "Chromium";v="117"',
- 'sec-ch-ua-mobile' : '?0',
+ 'authority': 'sdk.vercel.ai',
+ 'accept': '*/*',
+ 'accept-language': 'en,fr-FR;q=0.9,fr;q=0.8,es-ES;q=0.7,es;q=0.6,en-US;q=0.5,am;q=0.4,de;q=0.3',
+ 'cache-control': 'no-cache',
+ 'pragma': 'no-cache',
+ 'referer': 'https://sdk.vercel.ai/',
+ 'sec-ch-ua': '"Google Chrome";v="117", "Not;A=Brand";v="8", "Chromium";v="117"',
+ 'sec-ch-ua-mobile': '?0',
'sec-ch-ua-platform': '"macOS"',
- 'sec-fetch-dest' : 'empty',
- 'sec-fetch-mode' : 'cors',
- 'sec-fetch-site' : 'same-origin',
- 'user-agent' : 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/117.0.%s.%s Safari/537.36' % (
- random.randint(99, 999),
- random.randint(99, 999)
- )
+ 'sec-fetch-dest': 'empty',
+ 'sec-fetch-mode': 'cors',
+ 'sec-fetch-site': 'same-origin',
+ 'user-agent': f'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/117.0.{random.randint(99, 999)}.{random.randint(99, 999)} Safari/537.36',
}
response = requests.get('https://sdk.vercel.ai/openai.jpeg',
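
Note: below is a minimal, standalone sketch (not part of the committed file) of the user-agent construction this commit switches to, i.e. an f-string interpolating two random Chrome build numbers in place of the earlier multi-line %-formatting. The helper name build_user_agent is illustrative only and does not exist in Vercel.py.

    import random

    def build_user_agent() -> str:
        # Illustrative helper: same pattern the diff adopts, interpolating two
        # random build numbers into the Chrome version token of the UA string.
        return (
            'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) '
            'AppleWebKit/537.36 (KHTML, like Gecko) '
            f'Chrome/117.0.{random.randint(99, 999)}.{random.randint(99, 999)} Safari/537.36'
        )

    print(build_user_agent())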