| author | Tekky <98614666+xtekky@users.noreply.github.com> | 2023-10-10 00:18:17 +0200 |
|---|---|---|
| committer | GitHub <noreply@github.com> | 2023-10-10 00:18:17 +0200 |
| commit | 8a5c23f693c1ad92b5d5259201bb99807f76ea2a (patch) | |
| tree | 26a25d7b5bee6ba1e2ac484e1ad124a8e52b0b43 /g4f/Provider/Vercel.py | |
| parent | Merge branch 'main' of https://github.com/xtekky/gpt4free (diff) | |
| parent | Add Proxy Support and Create Provider to Readme (diff) | |
Diffstat (limited to 'g4f/Provider/Vercel.py')
| -rw-r--r-- | g4f/Provider/Vercel.py | 14 |
|---|---|---|

1 file changed, 9 insertions, 5 deletions
```diff
diff --git a/g4f/Provider/Vercel.py b/g4f/Provider/Vercel.py
index 2d20ca6a..2d856664 100644
--- a/g4f/Provider/Vercel.py
+++ b/g4f/Provider/Vercel.py
@@ -2,7 +2,7 @@ from __future__ import annotations
 
 import json, base64, requests, execjs, random, uuid
 
-from ..typing import Any, TypedDict, CreateResult
+from ..typing import Messages, TypedDict, CreateResult
 from .base_provider import BaseProvider
 from abc import abstractmethod
 
@@ -17,8 +17,9 @@ class Vercel(BaseProvider):
     @abstractmethod
     def create_completion(
         model: str,
-        messages: list[dict[str, str]],
+        messages: Messages,
         stream: bool,
+        proxy: str = None,
         **kwargs
     ) -> CreateResult:
         if not model:
@@ -52,15 +53,18 @@ class Vercel(BaseProvider):
             'model'       : model_info[model]['id'],
             'messages'    : messages,
             'playgroundId': str(uuid.uuid4()),
-            'chatIndex'   : 0} | model_info[model]['default_params']
+            'chatIndex'   : 0,
+            **model_info[model]['default_params'],
+            **kwargs
+        }
 
         max_retries = kwargs.get('max_retries', 20)
         for i in range(max_retries):
             response = requests.post('https://sdk.vercel.ai/api/generate',
-                headers=headers, json=json_data, stream=True)
+                headers=headers, json=json_data, stream=True, proxies={"https": proxy})
             try:
                 response.raise_for_status()
-            except Exception:
+            except Exception:
                 continue
             for token in response.iter_content(chunk_size=None):
                 yield token.decode()
```
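For context, a minimal caller-side sketch of what this change enables. The new `proxy` argument is forwarded to `requests.post(..., proxies={"https": proxy})`, and because `**kwargs` is now unpacked into `json_data` after `model_info[model]['default_params']`, caller-supplied generation parameters override the model defaults. The model id, proxy URL, and `temperature` value below are illustrative placeholders, not values taken from this commit.

```python
# Illustrative usage sketch (not part of the commit): calling the Vercel
# provider with the `proxy` keyword added in this diff.
from g4f.Provider import Vercel

messages = [{"role": "user", "content": "Hello"}]

# `proxy` is passed through to requests.post(..., proxies={"https": proxy}),
# so it should be an HTTP(S) proxy URL reachable from this machine.
# Extra keyword arguments such as `temperature` are merged into `json_data`
# after the model's default_params, so they override the defaults.
for token in Vercel.create_completion(
    model="gpt-3.5-turbo",           # placeholder model id
    messages=messages,
    stream=True,
    proxy="http://127.0.0.1:8080",   # placeholder proxy URL
    temperature=0.9,                 # example override of a default param
):
    print(token, end="", flush=True)
```

One consequence of the merge order (`default_params` first, then `**kwargs`) is that any keyword the caller passes wins over the provider's defaults, which is the behavioral difference from the old `{...} | default_params` expression where the defaults were applied last.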