From 6dd378d2aca256f45ff7b2fd23c59497aad82045 Mon Sep 17 00:00:00 2001
From: kqlio67
Date: Wed, 6 Nov 2024 18:39:14 +0200
Subject: Update (g4f/Provider/)

---
 g4f/Provider/Bing.py                        |   2 +-
 g4f/Provider/BingCreateImages.py            |  54 --------------
 g4f/Provider/GeminiPro.py                   | 107 ----------------------------
 g4f/Provider/WhiteRabbitNeo.py              |  57 ---------------
 g4f/Provider/__init__.py                    |   3 -
 g4f/Provider/needs_auth/BingCreateImages.py |  54 ++++++++++++++
 g4f/Provider/needs_auth/GeminiPro.py        | 107 ++++++++++++++++++++++++++++
 g4f/Provider/needs_auth/OpenaiChat.py       |   1 +
 g4f/Provider/needs_auth/WhiteRabbitNeo.py   |  57 +++++++++++++++
 g4f/Provider/needs_auth/__init__.py         |  32 +++++----
 10 files changed, 238 insertions(+), 236 deletions(-)
 delete mode 100644 g4f/Provider/BingCreateImages.py
 delete mode 100644 g4f/Provider/GeminiPro.py
 delete mode 100644 g4f/Provider/WhiteRabbitNeo.py
 create mode 100644 g4f/Provider/needs_auth/BingCreateImages.py
 create mode 100644 g4f/Provider/needs_auth/GeminiPro.py
 create mode 100644 g4f/Provider/needs_auth/WhiteRabbitNeo.py

diff --git a/g4f/Provider/Bing.py b/g4f/Provider/Bing.py
index f04b1a54..cdc2b9d9 100644
--- a/g4f/Provider/Bing.py
+++ b/g4f/Provider/Bing.py
@@ -17,7 +17,7 @@ from .base_provider import AsyncGeneratorProvider, ProviderModelMixin
 from .helper import get_random_hex
 from .bing.upload_image import upload_image
 from .bing.conversation import Conversation, create_conversation, delete_conversation
-from .BingCreateImages import BingCreateImages
+from .needs_auth.BingCreateImages import BingCreateImages
 from .. import debug
 
 class Tones:
diff --git a/g4f/Provider/BingCreateImages.py b/g4f/Provider/BingCreateImages.py
deleted file mode 100644
index 7a206c8f..00000000
--- a/g4f/Provider/BingCreateImages.py
+++ /dev/null
@@ -1,54 +0,0 @@
-from __future__ import annotations
-
-from ..cookies import get_cookies
-from ..image import ImageResponse
-from ..errors import MissingAuthError
-from ..typing import AsyncResult, Messages, Cookies
-from .base_provider import AsyncGeneratorProvider, ProviderModelMixin
-from .bing.create_images import create_images, create_session
-
-class BingCreateImages(AsyncGeneratorProvider, ProviderModelMixin):
-    label = "Microsoft Designer in Bing"
-    parent = "Bing"
-    url = "https://www.bing.com/images/create"
-    working = True
-    needs_auth = True
-    image_models = ["dall-e"]
-
-    def __init__(self, cookies: Cookies = None, proxy: str = None, api_key: str = None) -> None:
-        if api_key is not None:
-            if cookies is None:
-                cookies = {}
-            cookies["_U"] = api_key
-        self.cookies = cookies
-        self.proxy = proxy
-
-    @classmethod
-    async def create_async_generator(
-        cls,
-        model: str,
-        messages: Messages,
-        api_key: str = None,
-        cookies: Cookies = None,
-        proxy: str = None,
-        **kwargs
-    ) -> AsyncResult:
-        session = BingCreateImages(cookies, proxy, api_key)
-        yield await session.generate(messages[-1]["content"])
-
-    async def generate(self, prompt: str) -> ImageResponse:
-        """
-        Asynchronously creates a markdown formatted string with images based on the prompt.
-
-        Args:
-            prompt (str): Prompt to generate images.
-
-        Returns:
-            str: Markdown formatted string with images.
-        """
-        cookies = self.cookies or get_cookies(".bing.com", False)
-        if cookies is None or "_U" not in cookies:
-            raise MissingAuthError('Missing "_U" cookie')
-        async with create_session(cookies, self.proxy) as session:
-            images = await create_images(session, prompt)
-        return ImageResponse(images, prompt, {"preview": "{image}?w=200&h=200"} if len(images) > 1 else {})
\ No newline at end of file
diff --git a/g4f/Provider/GeminiPro.py b/g4f/Provider/GeminiPro.py
deleted file mode 100644
index 06bf69ee..00000000
--- a/g4f/Provider/GeminiPro.py
+++ /dev/null
@@ -1,107 +0,0 @@
-from __future__ import annotations
-
-import base64
-import json
-from aiohttp import ClientSession, BaseConnector
-
-from ..typing import AsyncResult, Messages, ImageType
-from .base_provider import AsyncGeneratorProvider, ProviderModelMixin
-from ..image import to_bytes, is_accepted_format
-from ..errors import MissingAuthError
-from .helper import get_connector
-
-class GeminiPro(AsyncGeneratorProvider, ProviderModelMixin):
-    label = "Gemini API"
-    url = "https://ai.google.dev"
-    working = True
-    supports_message_history = True
-    needs_auth = True
-    default_model = "gemini-1.5-pro-latest"
-    default_vision_model = default_model
-    models = [default_model, "gemini-pro", "gemini-pro-vision", "gemini-1.5-flash"]
-
-    @classmethod
-    async def create_async_generator(
-        cls,
-        model: str,
-        messages: Messages,
-        stream: bool = False,
-        proxy: str = None,
-        api_key: str = None,
-        api_base: str = "https://generativelanguage.googleapis.com/v1beta",
-        use_auth_header: bool = False,
-        image: ImageType = None,
-        connector: BaseConnector = None,
-        **kwargs
-    ) -> AsyncResult:
-        model = cls.get_model(model)
-
-        if not api_key:
-            raise MissingAuthError('Add a "api_key"')
-
-        headers = params = None
-        if use_auth_header:
-            headers = {"Authorization": f"Bearer {api_key}"}
-        else:
-            params = {"key": api_key}
-
-        method = "streamGenerateContent" if stream else "generateContent"
-        url = f"{api_base.rstrip('/')}/models/{model}:{method}"
-        async with ClientSession(headers=headers, connector=get_connector(connector, proxy)) as session:
-            contents = [
-                {
-                    "role": "model" if message["role"] == "assistant" else "user",
-                    "parts": [{"text": message["content"]}]
-                }
-                for message in messages
-                if message["role"] != "system"
-            ]
-            if image is not None:
-                image = to_bytes(image)
-                contents[-1]["parts"].append({
-                    "inline_data": {
-                        "mime_type": is_accepted_format(image),
-                        "data": base64.b64encode(image).decode()
-                    }
-                })
-            data = {
-                "contents": contents,
-                "generationConfig": {
-                    "stopSequences": kwargs.get("stop"),
-                    "temperature": kwargs.get("temperature"),
-                    "maxOutputTokens": kwargs.get("max_tokens"),
-                    "topP": kwargs.get("top_p"),
-                    "topK": kwargs.get("top_k"),
-                }
-            }
-            system_prompt = "\n".join(
-                message["content"]
-                for message in messages
-                if message["role"] == "system"
-            )
-            if system_prompt:
-                data["system_instruction"] = {"parts": {"text": system_prompt}}
-            async with session.post(url, params=params, json=data) as response:
-                if not response.ok:
-                    data = await response.json()
-                    data = data[0] if isinstance(data, list) else data
-                    raise RuntimeError(f"Response {response.status}: {data['error']['message']}")
-                if stream:
-                    lines = []
-                    async for chunk in response.content:
-                        if chunk == b"[{\n":
-                            lines = [b"{\n"]
-                        elif chunk == b",\r\n" or chunk == b"]":
-                            try:
-                                data = b"".join(lines)
-                                data = json.loads(data)
-                                yield data["candidates"][0]["content"]["parts"][0]["text"]
-                            except:
-                                data = data.decode(errors="ignore") if isinstance(data, bytes) else data
-                                raise RuntimeError(f"Read chunk failed: {data}")
-                            lines = []
-                        else:
-                            lines.append(chunk)
-                else:
-                    data = await response.json()
-                    yield data["candidates"][0]["content"]["parts"][0]["text"]
\ No newline at end of file
diff --git a/g4f/Provider/WhiteRabbitNeo.py b/g4f/Provider/WhiteRabbitNeo.py
deleted file mode 100644
index 339434e6..00000000
--- a/g4f/Provider/WhiteRabbitNeo.py
+++ /dev/null
@@ -1,57 +0,0 @@
-from __future__ import annotations
-
-from aiohttp import ClientSession, BaseConnector
-
-from ..typing import AsyncResult, Messages, Cookies
-from ..requests.raise_for_status import raise_for_status
-from .base_provider import AsyncGeneratorProvider
-from .helper import get_cookies, get_connector, get_random_string
-
-class WhiteRabbitNeo(AsyncGeneratorProvider):
-    url = "https://www.whiterabbitneo.com"
-    working = True
-    supports_message_history = True
-    needs_auth = True
-
-    @classmethod
-    async def create_async_generator(
-        cls,
-        model: str,
-        messages: Messages,
-        cookies: Cookies = None,
-        connector: BaseConnector = None,
-        proxy: str = None,
-        **kwargs
-    ) -> AsyncResult:
-        if cookies is None:
-            cookies = get_cookies("www.whiterabbitneo.com")
-        headers = {
-            "User-Agent": "Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:123.0) Gecko/20100101 Firefox/123.0",
-            "Accept": "*/*",
-            "Accept-Language": "de,en-US;q=0.7,en;q=0.3",
-            "Accept-Encoding": "gzip, deflate, br",
-            "Referer": f"{cls.url}/",
-            "Content-Type": "text/plain;charset=UTF-8",
-            "Origin": cls.url,
-            "Connection": "keep-alive",
-            "Sec-Fetch-Dest": "empty",
-            "Sec-Fetch-Mode": "cors",
-            "Sec-Fetch-Site": "same-origin",
-            "TE": "trailers"
-        }
-        async with ClientSession(
-            headers=headers,
-            cookies=cookies,
-            connector=get_connector(connector, proxy)
-        ) as session:
-            data = {
-                "messages": messages,
-                "id": get_random_string(6),
-                "enhancePrompt": False,
-                "useFunctions": False
-            }
-            async with session.post(f"{cls.url}/api/chat", json=data, proxy=proxy) as response:
-                await raise_for_status(response)
-                async for chunk in response.content.iter_any():
-                    if chunk:
-                        yield chunk.decode(errors="ignore")
\ No newline at end of file
diff --git a/g4f/Provider/__init__.py b/g4f/Provider/__init__.py
index f297f4dc..19ddaa53 100644
--- a/g4f/Provider/__init__.py
+++ b/g4f/Provider/__init__.py
@@ -20,7 +20,6 @@ from .Allyfy import Allyfy
 from .AiMathGPT import AiMathGPT
 from .Airforce import Airforce
 from .Bing import Bing
-from .BingCreateImages import BingCreateImages
 from .Blackbox import Blackbox
 from .ChatGpt import ChatGpt
 from .Chatgpt4Online import Chatgpt4Online
@@ -32,7 +31,6 @@ from .DDG import DDG
 from .DeepInfraChat import DeepInfraChat
 from .Free2GPT import Free2GPT
 from .FreeGpt import FreeGpt
-from .GeminiPro import GeminiPro
 from .GizAI import GizAI
 from .HuggingChat import HuggingChat
 from .Liaobots import Liaobots
@@ -46,7 +44,6 @@ from .ReplicateHome import ReplicateHome
 from .RubiksAI import RubiksAI
 from .TeachAnything import TeachAnything
 from .Upstage import Upstage
-from .WhiteRabbitNeo import WhiteRabbitNeo
 from .You import You
 
 import sys
diff --git a/g4f/Provider/needs_auth/BingCreateImages.py b/g4f/Provider/needs_auth/BingCreateImages.py
new file mode 100644
index 00000000..80984d40
--- /dev/null
+++ b/g4f/Provider/needs_auth/BingCreateImages.py
@@ -0,0 +1,54 @@
+from __future__ import annotations
+
+from ...cookies import get_cookies
+from ...image import ImageResponse
+from ...errors import MissingAuthError
+from ...typing import AsyncResult, Messages, Cookies
+from ..base_provider import AsyncGeneratorProvider, ProviderModelMixin
+from ..bing.create_images import create_images, create_session
+
+class BingCreateImages(AsyncGeneratorProvider, ProviderModelMixin):
+    label = "Microsoft Designer in Bing"
+    parent = "Bing"
+    url = "https://www.bing.com/images/create"
+    working = True
+    needs_auth = True
+    image_models = ["dall-e"]
+
+    def __init__(self, cookies: Cookies = None, proxy: str = None, api_key: str = None) -> None:
+        if api_key is not None:
+            if cookies is None:
+                cookies = {}
+            cookies["_U"] = api_key
+        self.cookies = cookies
+        self.proxy = proxy
+
+    @classmethod
+    async def create_async_generator(
+        cls,
+        model: str,
+        messages: Messages,
+        api_key: str = None,
+        cookies: Cookies = None,
+        proxy: str = None,
+        **kwargs
+    ) -> AsyncResult:
+        session = BingCreateImages(cookies, proxy, api_key)
+        yield await session.generate(messages[-1]["content"])
+
+    async def generate(self, prompt: str) -> ImageResponse:
+        """
+        Asynchronously creates a markdown formatted string with images based on the prompt.
+
+        Args:
+            prompt (str): Prompt to generate images.
+
+        Returns:
+            str: Markdown formatted string with images.
+        """
+        cookies = self.cookies or get_cookies(".bing.com", False)
+        if cookies is None or "_U" not in cookies:
+            raise MissingAuthError('Missing "_U" cookie')
+        async with create_session(cookies, self.proxy) as session:
+            images = await create_images(session, prompt)
+        return ImageResponse(images, prompt, {"preview": "{image}?w=200&h=200"} if len(images) > 1 else {})
\ No newline at end of file
diff --git a/g4f/Provider/needs_auth/GeminiPro.py b/g4f/Provider/needs_auth/GeminiPro.py
new file mode 100644
index 00000000..5c170ae5
--- /dev/null
+++ b/g4f/Provider/needs_auth/GeminiPro.py
@@ -0,0 +1,107 @@
+from __future__ import annotations
+
+import base64
+import json
+from aiohttp import ClientSession, BaseConnector
+
+from ...typing import AsyncResult, Messages, ImageType
+from ..base_provider import AsyncGeneratorProvider, ProviderModelMixin
+from ...image import to_bytes, is_accepted_format
+from ...errors import MissingAuthError
+from ..helper import get_connector
+
+class GeminiPro(AsyncGeneratorProvider, ProviderModelMixin):
+    label = "Gemini API"
+    url = "https://ai.google.dev"
+    working = True
+    supports_message_history = True
+    needs_auth = True
+    default_model = "gemini-1.5-pro-latest"
+    default_vision_model = default_model
+    models = [default_model, "gemini-pro", "gemini-pro-vision", "gemini-1.5-flash"]
+
+    @classmethod
+    async def create_async_generator(
+        cls,
+        model: str,
+        messages: Messages,
+        stream: bool = False,
+        proxy: str = None,
+        api_key: str = None,
+        api_base: str = "https://generativelanguage.googleapis.com/v1beta",
+        use_auth_header: bool = False,
+        image: ImageType = None,
+        connector: BaseConnector = None,
+        **kwargs
+    ) -> AsyncResult:
+        model = cls.get_model(model)
+
+        if not api_key:
+            raise MissingAuthError('Add a "api_key"')
+
+        headers = params = None
+        if use_auth_header:
+            headers = {"Authorization": f"Bearer {api_key}"}
+        else:
+            params = {"key": api_key}
+
+        method = "streamGenerateContent" if stream else "generateContent"
+        url = f"{api_base.rstrip('/')}/models/{model}:{method}"
+        async with ClientSession(headers=headers, connector=get_connector(connector, proxy)) as session:
+            contents = [
+                {
+                    "role": "model" if message["role"] == "assistant" else "user",
+                    "parts": [{"text": message["content"]}]
+                }
+                for message in messages
+                if message["role"] != "system"
+            ]
+            if image is not None:
+                image = to_bytes(image)
+                contents[-1]["parts"].append({
+                    "inline_data": {
+                        "mime_type": is_accepted_format(image),
+                        "data": base64.b64encode(image).decode()
+                    }
+                })
+            data = {
+                "contents": contents,
+                "generationConfig": {
+                    "stopSequences": kwargs.get("stop"),
+                    "temperature": kwargs.get("temperature"),
+                    "maxOutputTokens": kwargs.get("max_tokens"),
+                    "topP": kwargs.get("top_p"),
+                    "topK": kwargs.get("top_k"),
+                }
+            }
+            system_prompt = "\n".join(
+                message["content"]
+                for message in messages
+                if message["role"] == "system"
+            )
+            if system_prompt:
+                data["system_instruction"] = {"parts": {"text": system_prompt}}
+            async with session.post(url, params=params, json=data) as response:
+                if not response.ok:
+                    data = await response.json()
+                    data = data[0] if isinstance(data, list) else data
+                    raise RuntimeError(f"Response {response.status}: {data['error']['message']}")
+                if stream:
+                    lines = []
+                    async for chunk in response.content:
+                        if chunk == b"[{\n":
+                            lines = [b"{\n"]
+                        elif chunk == b",\r\n" or chunk == b"]":
+                            try:
+                                data = b"".join(lines)
+                                data = json.loads(data)
+                                yield data["candidates"][0]["content"]["parts"][0]["text"]
+                            except:
+                                data = data.decode(errors="ignore") if isinstance(data, bytes) else data
+                                raise RuntimeError(f"Read chunk failed: {data}")
+                            lines = []
+                        else:
+                            lines.append(chunk)
+                else:
+                    data = await response.json()
+                    yield data["candidates"][0]["content"]["parts"][0]["text"]
diff --git a/g4f/Provider/needs_auth/OpenaiChat.py b/g4f/Provider/needs_auth/OpenaiChat.py
index f02121e3..3a0d6b29 100644
--- a/g4f/Provider/needs_auth/OpenaiChat.py
+++ b/g4f/Provider/needs_auth/OpenaiChat.py
@@ -55,6 +55,7 @@ class OpenaiChat(AsyncGeneratorProvider, ProviderModelMixin):
     label = "OpenAI ChatGPT"
     url = "https://chatgpt.com"
     working = True
+    needs_auth = True
     supports_gpt_4 = True
     supports_message_history = True
     supports_system_message = True
diff --git a/g4f/Provider/needs_auth/WhiteRabbitNeo.py b/g4f/Provider/needs_auth/WhiteRabbitNeo.py
new file mode 100644
index 00000000..82275c1c
--- /dev/null
+++ b/g4f/Provider/needs_auth/WhiteRabbitNeo.py
@@ -0,0 +1,57 @@
+from __future__ import annotations
+
+from aiohttp import ClientSession, BaseConnector
+
+from ...typing import AsyncResult, Messages, Cookies
+from ...requests.raise_for_status import raise_for_status
+from ..base_provider import AsyncGeneratorProvider
+from ..helper import get_cookies, get_connector, get_random_string
+
+class WhiteRabbitNeo(AsyncGeneratorProvider):
+    url = "https://www.whiterabbitneo.com"
+    working = True
+    supports_message_history = True
+    needs_auth = True
+
+    @classmethod
+    async def create_async_generator(
+        cls,
+        model: str,
+        messages: Messages,
+        cookies: Cookies = None,
+        connector: BaseConnector = None,
+        proxy: str = None,
+        **kwargs
+    ) -> AsyncResult:
+        if cookies is None:
+            cookies = get_cookies("www.whiterabbitneo.com")
+        headers = {
+            "User-Agent": "Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:123.0) Gecko/20100101 Firefox/123.0",
+            "Accept": "*/*",
+            "Accept-Language": "de,en-US;q=0.7,en;q=0.3",
+            "Accept-Encoding": "gzip, deflate, br",
+            "Referer": f"{cls.url}/",
+            "Content-Type": "text/plain;charset=UTF-8",
+            "Origin": cls.url,
+            "Connection": "keep-alive",
+            "Sec-Fetch-Dest": "empty",
+            "Sec-Fetch-Mode": "cors",
+            "Sec-Fetch-Site": "same-origin",
+            "TE": "trailers"
+        }
+        async with ClientSession(
+            headers=headers,
+            cookies=cookies,
+            connector=get_connector(connector, proxy)
+        ) as session:
+            data = {
+                "messages": messages,
+                "id": get_random_string(6),
+                "enhancePrompt": False,
+                "useFunctions": False
+            }
+            async with session.post(f"{cls.url}/api/chat", json=data, proxy=proxy) as response:
+                await raise_for_status(response)
+                async for chunk in response.content.iter_any():
+                    if chunk:
+                        yield chunk.decode(errors="ignore")
diff --git a/g4f/Provider/needs_auth/__init__.py b/g4f/Provider/needs_auth/__init__.py
index 0626a837..e979f86d 100644
--- a/g4f/Provider/needs_auth/__init__.py
+++ b/g4f/Provider/needs_auth/__init__.py
@@ -1,18 +1,22 @@
 from .gigachat import *
 
-from .DeepInfra import DeepInfra
-from .DeepInfraImage import DeepInfraImage
-from .Gemini import Gemini
-from .Raycast import Raycast
-from .Theb import Theb
-from .ThebApi import ThebApi
-from .OpenaiChat import OpenaiChat
-from .Poe import Poe
-from .Openai import Openai
-from .Groq import Groq
-#from .OpenaiAccount import OpenaiAccount
-from .PerplexityApi import PerplexityApi
-from .Replicate import Replicate
-from .MetaAI import MetaAI
 #from .MetaAIAccount import MetaAIAccount
+#from .OpenaiAccount import OpenaiAccount
+
+from .BingCreateImages import BingCreateImages
+from .DeepInfra import DeepInfra
+from .DeepInfraImage import DeepInfraImage
+from .Gemini import Gemini
+from .GeminiPro import GeminiPro
+from .Groq import Groq
 from .HuggingFace import HuggingFace
+from .MetaAI import MetaAI
+from .Openai import Openai
+from .OpenaiChat import OpenaiChat
+from .PerplexityApi import PerplexityApi
+from .Poe import Poe
+from .Raycast import Raycast
+from .Replicate import Replicate
+from .Theb import Theb
+from .ThebApi import ThebApi
+from .WhiteRabbitNeo import WhiteRabbitNeo
-- 
cgit v1.2.3
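
After this patch the relocated providers are re-exported from g4f.Provider.needs_auth (see the needs_auth/__init__.py hunk above). A minimal usage sketch for the moved GeminiPro provider, driving the create_async_generator classmethod shown in the diff; the import path follows the new package layout, while the API key value is a placeholder and an installed g4f checkout containing this commit is assumed:

import asyncio

# Re-exported by g4f/Provider/needs_auth/__init__.py after this change.
from g4f.Provider.needs_auth import GeminiPro

async def main() -> None:
    # create_async_generator is the async generator classmethod in the diff above;
    # it raises MissingAuthError when no api_key is supplied.
    async for chunk in GeminiPro.create_async_generator(
        model="gemini-1.5-pro-latest",                      # default_model in the diff
        messages=[{"role": "user", "content": "Hello"}],
        api_key="YOUR_API_KEY",                             # placeholder, not a real key
        stream=True,
    ):
        print(chunk, end="")

asyncio.run(main())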