author    | kqlio67 <kqlio67@users.noreply.github.com> | 2024-09-24 12:23:53 +0200
committer | kqlio67 <kqlio67@users.noreply.github.com> | 2024-09-24 12:23:53 +0200
commit    | f8e403a745c5caff31d7edb854dcba40eba3166d (patch)
tree      | a75c6030a8054c56201fa2d41306a51b9052545c /g4f/Provider/ChatGptEs.py
parent    | Added gpt-4o provider (diff)
Diffstat (limited to 'g4f/Provider/ChatGptEs.py')
-rw-r--r-- | g4f/Provider/ChatGptEs.py | 142
1 file changed, 66 insertions, 76 deletions
diff --git a/g4f/Provider/ChatGptEs.py b/g4f/Provider/ChatGptEs.py
index d94ba59e..0e7062e5 100644
--- a/g4f/Provider/ChatGptEs.py
+++ b/g4f/Provider/ChatGptEs.py
@@ -1,95 +1,85 @@
 from __future__ import annotations
 
-from ..typing import Messages, CreateResult
-from ..providers.base_provider import AbstractProvider, ProviderModelMixin
+from aiohttp import ClientSession
+import os
+import json
+import re
 
-import requests, os, re, json
+from ..typing import AsyncResult, Messages
+from .base_provider import AsyncGeneratorProvider, ProviderModelMixin
+from .helper import format_prompt
 
-class ChatGptEs(AbstractProvider, ProviderModelMixin):
-    label = "ChatGptEs"
+class ChatGptEs(AsyncGeneratorProvider, ProviderModelMixin):
+    url = "https://chatgpt.es"
+    api_endpoint = "https://chatgpt.es/wp-admin/admin-ajax.php"
     working = True
-    supports_message_history = True
+    supports_gpt_4 = True
+    supports_stream = True
     supports_system_message = True
-    supports_stream = False
+    supports_message_history = True
+
+    default_model = 'gpt-4o'
+    models = ['gpt-4o', 'gpt-4o-mini', 'chatgpt-4o-latest']
+
+    model_aliases = {
+        "gpt-4o": "chatgpt-4o-latest",
+    }
+
+    @classmethod
+    def get_model(cls, model: str) -> str:
+        if model in cls.models:
+            return model
+        elif model in cls.model_aliases:
+            return cls.model_aliases[model]
+        else:
+            return cls.default_model
 
     @classmethod
-    def create_completion(
+    async def create_async_generator(
         cls,
         model: str,
         messages: Messages,
-        stream: bool,
+        proxy: str = None,
         **kwargs
-    ) -> CreateResult:
+    ) -> AsyncResult:
+        model = cls.get_model(model)
 
-        if model not in [
-            'gpt-4o',
-            'gpt-4o-mini',
-            'chatgpt-4o-latest'
-        ]:
-            raise ValueError(f"Unsupported model: {model}")
-
         headers = {
-            'accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.7',
-            'accept-language': 'en,fr-FR;q=0.9,fr;q=0.8,es-ES;q=0.7,es;q=0.6,en-US;q=0.5,am;q=0.4,de;q=0.3',
-            'cache-control': 'no-cache',
-            'pragma': 'no-cache',
-            'priority': 'u=0, i',
-            'referer': 'https://www.google.com/',
-            'sec-ch-ua': '"Google Chrome";v="129", "Not=A?Brand";v="8", "Chromium";v="129"',
-            'sec-ch-ua-mobile': '?0',
-            'sec-ch-ua-platform': '"macOS"',
-            'sec-fetch-dest': 'document',
-            'sec-fetch-mode': 'navigate',
-            'sec-fetch-site': 'cross-site',
-            'sec-fetch-user': '?1',
-            'upgrade-insecure-requests': '1',
-            'user-agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/129.0.0.0 Safari/537.36',
+            "authority": "chatgpt.es",
+            "accept": "application/json",
+            "origin": cls.url,
+            "referer": f"{cls.url}/chat",
+            "user-agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/129.0.0.0 Safari/537.36",
         }
 
-        response = requests.get('https://chatgpt.es/', headers=headers)
-        nonce_ = re.findall(r'data-nonce="(.+?)"', response.text)[0]
-        post_id = re.findall(r'data-post-id="(.+?)"', response.text)[0]
+        async with ClientSession(headers=headers) as session:
+            initial_response = await session.get(cls.url)
+            nonce_ = re.findall(r'data-nonce="(.+?)"', await initial_response.text())[0]
+            post_id = re.findall(r'data-post-id="(.+?)"', await initial_response.text())[0]
 
-        headers = {
-            'accept': '*/*',
-            'accept-language': 'en,fr-FR;q=0.9,fr;q=0.8,es-ES;q=0.7,es;q=0.6,en-US;q=0.5,am;q=0.4,de;q=0.3',
-            'cache-control': 'no-cache',
-            'origin': 'https://chatgpt.es',
-            'pragma': 'no-cache',
-            'priority': 'u=1, i',
-            'referer': 'https://chatgpt.es/',
-            'sec-ch-ua': '"Google Chrome";v="129", "Not=A?Brand";v="8", "Chromium";v="129"',
-            'sec-ch-ua-mobile': '?0',
-            'sec-ch-ua-platform': '"macOS"',
-            'sec-fetch-dest': 'empty',
-            'sec-fetch-mode': 'cors',
-            'sec-fetch-site': 'same-origin',
-            'user-agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/129.0.0.0 Safari/537.36',
-        }
-
-        conversation_history = [
-            "Human: stricly respond in the same language as my prompt, preferably english"
-        ]
-
-        for message in messages[:-1]:
-            if message['role'] == "user":
-                conversation_history.append(f"Human: {message['content']}")
-            else:
-                conversation_history.append(f"AI: {message['content']}")
+            conversation_history = [
+                "Human: strictly respond in the same language as my prompt, preferably English"
+            ]
 
-        payload = {
-            '_wpnonce': nonce_,
-            'post_id': post_id,
-            'url': 'https://chatgpt.es',
-            'action': 'wpaicg_chat_shortcode_message',
-            'message': messages[-1]['content'],
-            'bot_id': '0',
-            'chatbot_identity': 'shortcode',
-            'wpaicg_chat_client_id': os.urandom(5).hex(),
-            'wpaicg_chat_history': json.dumps(conversation_history)
-        }
+            for message in messages[:-1]:
+                if message['role'] == "user":
+                    conversation_history.append(f"Human: {message['content']}")
+                else:
+                    conversation_history.append(f"AI: {message['content']}")
 
-        response = requests.post('https://chatgpt.es/wp-admin/admin-ajax.php',
-            headers=headers, data=payload).json()
-
-        return (response['data'])
\ No newline at end of file
+            payload = {
+                '_wpnonce': nonce_,
+                'post_id': post_id,
+                'url': cls.url,
+                'action': 'wpaicg_chat_shortcode_message',
+                'message': messages[-1]['content'],
+                'bot_id': '0',
+                'chatbot_identity': 'shortcode',
+                'wpaicg_chat_client_id': os.urandom(5).hex(),
+                'wpaicg_chat_history': json.dumps(conversation_history)
+            }
+
+            async with session.post(cls.api_endpoint, headers=headers, data=payload) as response:
+                response.raise_for_status()
+                result = await response.json()
+                yield result['data']
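For context on what the new interface looks like from the caller's side, here is a minimal sketch that drives the rewritten `create_async_generator` directly with asyncio. The import path follows the `g4f/Provider/ChatGptEs.py` layout in this diff, while the model name and message list are purely illustrative; the request only succeeds if chatgpt.es is reachable and still serves the `data-nonce`/`data-post-id` markup the provider scrapes.

```python
import asyncio

# Import path assumes the package layout shown in the diff (g4f/Provider/ChatGptEs.py).
from g4f.Provider.ChatGptEs import ChatGptEs

async def main() -> None:
    # Illustrative OpenAI-style conversation; per the diff, only the last message is sent
    # as the prompt, earlier ones are folded into the provider's conversation history.
    messages = [
        {"role": "system", "content": "You are a concise assistant."},
        {"role": "user", "content": "Summarize what an async generator provider is."},
    ]

    # create_async_generator is an async generator classmethod: aliases such as "gpt-4o"
    # are resolved via get_model, and the provider yields the response text it receives.
    async for chunk in ChatGptEs.create_async_generator(model="gpt-4o", messages=messages):
        print(chunk)

if __name__ == "__main__":
    asyncio.run(main())
```

In normal use a provider like this is typically selected through g4f's client layer rather than imported directly; the direct call above is only meant to show the async-generator shape this commit introduces in place of the old blocking `create_completion`.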