author | Tekky <98614666+xtekky@users.noreply.github.com> | 2023-10-10 00:18:17 +0200 |
---|---|---|
committer | GitHub <noreply@github.com> | 2023-10-10 00:18:17 +0200 |
commit | 8a5c23f693c1ad92b5d5259201bb99807f76ea2a (patch) | |
tree | 26a25d7b5bee6ba1e2ac484e1ad124a8e52b0b43 /g4f/Provider/GptForLove.py | |
parent | Merge branch 'main' of https://github.com/xtekky/gpt4free (diff) | |
parent | Add Proxy Support and Create Provider to Readme (diff) | |
Diffstat (limited to 'g4f/Provider/GptForLove.py')
-rw-r--r-- | g4f/Provider/GptForLove.py | 9 |
1 file changed, 5 insertions, 4 deletions
diff --git a/g4f/Provider/GptForLove.py b/g4f/Provider/GptForLove.py
index 53c403e1..01cef443 100644
--- a/g4f/Provider/GptForLove.py
+++ b/g4f/Provider/GptForLove.py
@@ -3,7 +3,7 @@ from __future__ import annotations
 from aiohttp import ClientSession
 import execjs, os, json
 
-from ..typing import AsyncGenerator
+from ..typing import AsyncResult, Messages
 from .base_provider import AsyncGeneratorProvider
 from .helper import format_prompt
 
@@ -16,9 +16,10 @@ class GptForLove(AsyncGeneratorProvider):
     async def create_async_generator(
         cls,
         model: str,
-        messages: list[dict[str, str]],
+        messages: Messages,
+        proxy: str = None,
         **kwargs
-    ) -> AsyncGenerator:
+    ) -> AsyncResult:
         if not model:
             model = "gpt-3.5-turbo"
         headers = {
@@ -47,7 +48,7 @@ class GptForLove(AsyncGeneratorProvider):
                 "secret": get_secret(),
                 **kwargs
             }
-            async with session.post("https://api.gptplus.one/chat-process", json=data) as response:
+            async with session.post("https://api.gptplus.one/chat-process", json=data, proxy=proxy) as response:
                 response.raise_for_status()
                 async for line in response.content:
                     try:
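For context, below is a minimal usage sketch of the provider after this change, not part of the commit itself. It assumes `GptForLove` is importable from `g4f.Provider`, that `create_async_generator` is exposed as a classmethod yielding response chunks, and that `Messages` is the usual list of `{"role": ..., "content": ...}` dicts; the proxy URL is a hypothetical placeholder.

```python
# Hedged usage sketch: exercises the new `proxy` parameter, which this commit
# forwards to session.post(...) inside create_async_generator.
import asyncio

from g4f.Provider import GptForLove  # assumed import path

async def main():
    messages = [{"role": "user", "content": "Hello, how are you?"}]
    # "http://127.0.0.1:8080" is a placeholder for a local HTTP proxy.
    async for chunk in GptForLove.create_async_generator(
        model="gpt-3.5-turbo",
        messages=messages,
        proxy="http://127.0.0.1:8080",
    ):
        print(chunk, end="", flush=True)

asyncio.run(main())
```

Passing `proxy=None` (the default) keeps the previous behavior, since aiohttp ignores a `None` proxy argument.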