path: root/g4f/Provider/Vercel.py
author    abc <98614666+xtekky@users.noreply.github.com>    2023-08-27 17:37:44 +0200
committer abc <98614666+xtekky@users.noreply.github.com>    2023-08-27 17:37:44 +0200
commit efd75a11b871d61ac31b0e274acdfb33daba361d (patch)
tree   22bd158324444cdbb9dbed46f25f8dffa60f3756 /g4f/Provider/Vercel.py
parent ~ | v-0.0.2.5 (diff)
Diffstat (limited to 'g4f/Provider/Vercel.py')
-rw-r--r--  g4f/Provider/Vercel.py  |  27
1 file changed, 10 insertions(+), 17 deletions(-)
diff --git a/g4f/Provider/Vercel.py b/g4f/Provider/Vercel.py
index 186662c4..f2377db4 100644
--- a/g4f/Provider/Vercel.py
+++ b/g4f/Provider/Vercel.py
@@ -1,26 +1,21 @@
-import base64
-import json
-import uuid
+import base64, json, uuid, quickjs
-import quickjs
-from curl_cffi import requests
-
-from ..typing import Any, CreateResult, TypedDict
+from curl_cffi import requests
+from ..typing import Any, CreateResult, TypedDict
from .base_provider import BaseProvider
class Vercel(BaseProvider):
-    url = "https://play.vercel.ai"
-    working = True
+    url                   = "https://play.vercel.ai"
+    working               = True
    supports_gpt_35_turbo = True
    @staticmethod
    def create_completion(
        model: str,
        messages: list[dict[str, str]],
-        stream: bool,
-        **kwargs: Any,
-    ) -> CreateResult:
+        stream: bool, **kwargs: Any) -> CreateResult:
+
        if model in ["gpt-3.5-turbo", "gpt-4"]:
            model = "openai:" + model
        yield _chat(model_id=model, messages=messages)
@@ -29,8 +24,8 @@ class Vercel(BaseProvider):
def _chat(model_id: str, messages: list[dict[str, str]]) -> str:
    session = requests.Session(impersonate="chrome107")
-    url = "https://sdk.vercel.ai/api/generate"
-    header = _create_header(session)
+    url     = "https://sdk.vercel.ai/api/generate"
+    header  = _create_header(session)
    payload = _create_payload(model_id, messages)
    response = session.post(url=url, headers=header, json=payload)
@@ -44,15 +39,13 @@ def _create_payload(model_id: str, messages: list[dict[str, str]]) -> dict[str,
"messages": messages,
"playgroundId": str(uuid.uuid4()),
"chatIndex": 0,
- "model": model_id,
- } | default_params
+ "model": model_id} | default_params
def _create_header(session: requests.Session):
custom_encoding = _get_custom_encoding(session)
return {"custom-encoding": custom_encoding}
-
# based on https://github.com/ading2210/vercel-llm-api
def _get_custom_encoding(session: requests.Session):
url = "https://sdk.vercel.ai/openai.jpeg"