diff options
author | Tekky <98614666+xtekky@users.noreply.github.com> | 2023-08-28 22:08:23 +0200 |
---|---|---|
committer | GitHub <noreply@github.com> | 2023-08-28 22:08:23 +0200 |
commit | 7e687b3d178c00a27d7e5ae2613fe88ee7844639 (patch) | |
tree | 4034e8ae9fc7ca9af295f04358bb00516b464e0b /g4f/Provider/Liaobots.py | |
parent | Merge pull request #851 from Luneye/patch-1 (diff) | |
parent | Merge branch 'main' into hugging (diff) | |
download | gpt4free-7e687b3d178c00a27d7e5ae2613fe88ee7844639.tar gpt4free-7e687b3d178c00a27d7e5ae2613fe88ee7844639.tar.gz gpt4free-7e687b3d178c00a27d7e5ae2613fe88ee7844639.tar.bz2 gpt4free-7e687b3d178c00a27d7e5ae2613fe88ee7844639.tar.lz gpt4free-7e687b3d178c00a27d7e5ae2613fe88ee7844639.tar.xz gpt4free-7e687b3d178c00a27d7e5ae2613fe88ee7844639.tar.zst gpt4free-7e687b3d178c00a27d7e5ae2613fe88ee7844639.zip |
Diffstat (limited to 'g4f/Provider/Liaobots.py')
-rw-r--r-- | g4f/Provider/Liaobots.py | 113 |
1 file changed, 66 insertions, 47 deletions
import uuid
import json

from aiohttp import ClientSession

from ..typing import AsyncGenerator
from .base_provider import AsyncGeneratorProvider

# Model metadata sent verbatim to the liaobots.com chat API.
# maxLength / tokenLimit mirror the limits the site's own frontend uses.
models = {
    "gpt-4": {
        "id": "gpt-4",
        "name": "GPT-4",
        "maxLength": 24000,
        "tokenLimit": 8000,
    },
    "gpt-3.5-turbo": {
        "id": "gpt-3.5-turbo",
        "name": "GPT-3.5",
        "maxLength": 12000,
        "tokenLimit": 4000,
    },
    "gpt-3.5-turbo-16k": {
        "id": "gpt-3.5-turbo-16k",
        "name": "GPT-3.5-16k",
        "maxLength": 48000,
        "tokenLimit": 16000,
    },
}


class Liaobots(AsyncGeneratorProvider):
    """Async streaming provider backed by https://liaobots.com."""

    url = "https://liaobots.com"
    supports_stream = True
    supports_gpt_35_turbo = True
    supports_gpt_4 = True
    # Auth code obtained from /api/user, cached at class level so it is
    # fetched at most once per process (unless the caller supplies one).
    _auth_code = None

    @classmethod
    async def create_async_generator(
        cls,
        model: str,
        messages: list[dict[str, str]],
        auth: str = None,
        proxy: str = None,
        **kwargs
    ) -> AsyncGenerator:
        """Stream a chat completion from liaobots.com.

        Args:
            model: One of the keys of ``models``; unknown names fall back
                to ``gpt-3.5-turbo``.
            messages: OpenAI-style message dicts (``role``/``content``).
            auth: Optional pre-obtained ``x-auth-code``; when absent one is
                requested from ``/api/user`` and cached on the class.
            proxy: Optional proxy URL; a bare ``host:port`` is promoted to
                ``http://host:port``.

        Yields:
            Decoded response chunks as they arrive.

        Raises:
            RuntimeError: If the service returns an empty/missing auth code.
            aiohttp.ClientResponseError: On HTTP error statuses.
        """
        # aiohttp requires a scheme on the proxy URL; default to http.
        if proxy and "://" not in proxy:
            proxy = f"http://{proxy}"
        headers = {
            "authority": "liaobots.com",
            "content-type": "application/json",
            "origin": "https://liaobots.com",
            "referer": "https://liaobots.com/",
            "user-agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/112.0.0.0 Safari/537.36",
        }
        async with ClientSession(
            headers=headers
        ) as session:
            # Unknown models silently fall back rather than raising.
            model = model if model in models else "gpt-3.5-turbo"
            auth_code = auth if isinstance(auth, str) else cls._auth_code
            if not auth_code:
                # An empty "authcode" asks the service to mint a fresh one.
                async with session.post("https://liaobots.com/api/user", proxy=proxy, json={"authcode": ""}) as response:
                    response.raise_for_status()
                    auth_code = json.loads(await response.text()).get("authCode")
                    # Fail loudly here instead of sending an empty
                    # x-auth-code header and getting an opaque chat error.
                    if not auth_code:
                        raise RuntimeError("Empty or missing authCode from liaobots.com/api/user")
                    cls._auth_code = auth_code
            data = {
                "conversationId": str(uuid.uuid4()),
                "model": models[model],
                "messages": messages,
                "key": "",
                "prompt": "You are ChatGPT, a large language model trained by OpenAI. Follow the user's instructions carefully.",
            }
            async with session.post("https://liaobots.com/api/chat", proxy=proxy, json=data, headers={"x-auth-code": auth_code}) as response:
                response.raise_for_status()
                # The endpoint streams plain text; forward each chunk.
                async for line in response.content:
                    yield line.decode("utf-8")

    @classmethod
    @property
    def params(cls):
        """Human-readable summary of the supported keyword parameters."""
        params = [
            ("model", "str"),
            ("messages", "list[dict[str, str]]"),
            ("stream", "bool"),
            ("proxy", "str"),
            ("auth", "str"),
        ]
        param = ", ".join([": ".join(p) for p in params])
        # NOTE(review): the diff hunk cuts off before the return statement;
        # this format string is reconstructed from context — confirm against
        # the full file.
        return f"g4f.provider.{cls.__name__} supports: ({param})"