From 087a4d684c456ca93e2689083074ed909974e929 Mon Sep 17 00:00:00 2001
From: kqlio67
Date: Wed, 6 Nov 2024 14:10:19 +0200
Subject: Update (g4f/Provider/DeepInfra.py g4f/Provider/__init__.py g4f/Provider/needs_auth/)

---
 g4f/Provider/DeepInfra.py            | 58 ------------------------------------
 g4f/Provider/__init__.py             |  1 -
 g4f/Provider/needs_auth/DeepInfra.py | 58 ++++++++++++++++++++++++++++++++++++
 g4f/Provider/needs_auth/__init__.py  |  1 +
 4 files changed, 59 insertions(+), 59 deletions(-)
 delete mode 100644 g4f/Provider/DeepInfra.py
 create mode 100644 g4f/Provider/needs_auth/DeepInfra.py

diff --git a/g4f/Provider/DeepInfra.py b/g4f/Provider/DeepInfra.py
deleted file mode 100644
index b12fb254..00000000
--- a/g4f/Provider/DeepInfra.py
+++ /dev/null
@@ -1,58 +0,0 @@
-from __future__ import annotations
-
-import requests
-from ..typing import AsyncResult, Messages
-from .needs_auth.Openai import Openai
-
-class DeepInfra(Openai):
-    label = "DeepInfra"
-    url = "https://deepinfra.com"
-    working = True
-    needs_auth = True
-    supports_stream = True
-    supports_message_history = True
-    default_model = "meta-llama/Meta-Llama-3.1-70B-Instruct"
-
-    @classmethod
-    def get_models(cls):
-        if not cls.models:
-            url = 'https://api.deepinfra.com/models/featured'
-            models = requests.get(url).json()
-            cls.models = [model['model_name'] for model in models if model["type"] == "text-generation"]
-        return cls.models
-
-    @classmethod
-    def create_async_generator(
-        cls,
-        model: str,
-        messages: Messages,
-        stream: bool,
-        api_base: str = "https://api.deepinfra.com/v1/openai",
-        temperature: float = 0.7,
-        max_tokens: int = 1028,
-        **kwargs
-    ) -> AsyncResult:
-        headers = {
-            'Accept-Encoding': 'gzip, deflate, br',
-            'Accept-Language': 'en-US',
-            'Connection': 'keep-alive',
-            'Origin': 'https://deepinfra.com',
-            'Referer': 'https://deepinfra.com/',
-            'Sec-Fetch-Dest': 'empty',
-            'Sec-Fetch-Mode': 'cors',
-            'Sec-Fetch-Site': 'same-site',
-            'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/119.0.0.0 Safari/537.36',
-            'X-Deepinfra-Source': 'web-embed',
-            'sec-ch-ua': '"Google Chrome";v="119", "Chromium";v="119", "Not?A_Brand";v="24"',
-            'sec-ch-ua-mobile': '?0',
-            'sec-ch-ua-platform': '"macOS"',
-        }
-        return super().create_async_generator(
-            model, messages,
-            stream=stream,
-            api_base=api_base,
-            temperature=temperature,
-            max_tokens=max_tokens,
-            headers=headers,
-            **kwargs
-        )
\ No newline at end of file
diff --git a/g4f/Provider/__init__.py b/g4f/Provider/__init__.py
index 048ce504..55fabd25 100644
--- a/g4f/Provider/__init__.py
+++ b/g4f/Provider/__init__.py
@@ -35,7 +35,6 @@ from .ChatifyAI import ChatifyAI
 from .Cloudflare import Cloudflare
 from .DarkAI import DarkAI
 from .DDG import DDG
-from .DeepInfra import DeepInfra
 from .DeepInfraChat import DeepInfraChat
 from .DeepInfraImage import DeepInfraImage
 from .Editee import Editee
diff --git a/g4f/Provider/needs_auth/DeepInfra.py b/g4f/Provider/needs_auth/DeepInfra.py
new file mode 100644
index 00000000..ebe5bfbf
--- /dev/null
+++ b/g4f/Provider/needs_auth/DeepInfra.py
@@ -0,0 +1,58 @@
+from __future__ import annotations
+
+import requests
+from ...typing import AsyncResult, Messages
+from .Openai import Openai
+
+class DeepInfra(Openai):
+    label = "DeepInfra"
+    url = "https://deepinfra.com"
+    working = True
+    needs_auth = True
+    supports_stream = True
+    supports_message_history = True
+    default_model = "meta-llama/Meta-Llama-3.1-70B-Instruct"
+
+    @classmethod
+    def get_models(cls):
+        if not cls.models:
+            url = 'https://api.deepinfra.com/models/featured'
+            models = requests.get(url).json()
+            cls.models = [model['model_name'] for model in models if model["type"] == "text-generation"]
+        return cls.models
+
+    @classmethod
+    def create_async_generator(
+        cls,
+        model: str,
+        messages: Messages,
+        stream: bool,
+        api_base: str = "https://api.deepinfra.com/v1/openai",
+        temperature: float = 0.7,
+        max_tokens: int = 1028,
+        **kwargs
+    ) -> AsyncResult:
+        headers = {
+            'Accept-Encoding': 'gzip, deflate, br',
+            'Accept-Language': 'en-US',
+            'Connection': 'keep-alive',
+            'Origin': 'https://deepinfra.com',
+            'Referer': 'https://deepinfra.com/',
+            'Sec-Fetch-Dest': 'empty',
+            'Sec-Fetch-Mode': 'cors',
+            'Sec-Fetch-Site': 'same-site',
+            'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/119.0.0.0 Safari/537.36',
+            'X-Deepinfra-Source': 'web-embed',
+            'sec-ch-ua': '"Google Chrome";v="119", "Chromium";v="119", "Not?A_Brand";v="24"',
+            'sec-ch-ua-mobile': '?0',
+            'sec-ch-ua-platform': '"macOS"',
+        }
+        return super().create_async_generator(
+            model, messages,
+            stream=stream,
+            api_base=api_base,
+            temperature=temperature,
+            max_tokens=max_tokens,
+            headers=headers,
+            **kwargs
+        )
diff --git a/g4f/Provider/needs_auth/__init__.py b/g4f/Provider/needs_auth/__init__.py
index 0492645d..aa3547a5 100644
--- a/g4f/Provider/needs_auth/__init__.py
+++ b/g4f/Provider/needs_auth/__init__.py
@@ -1,3 +1,4 @@
+from .DeepInfra import DeepInfra
 from .Gemini import Gemini
 from .Raycast import Raycast
 from .Theb import Theb
--
cgit v1.2.3
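
Note on usage (not part of the patch itself): this commit moves the DeepInfra provider into the needs_auth package and removes its export from g4f/Provider/__init__.py, so downstream imports change accordingly. The sketch below is a minimal, hypothetical illustration of the new import path; it assumes nothing else re-exports the class from g4f.Provider, and the get_models() call performs a live HTTP request to DeepInfra's featured-models endpoint.

# Hypothetical usage sketch for the relocated provider; relies only on the
# re-export added to g4f/Provider/needs_auth/__init__.py in this patch.

# Before this commit:
#   from g4f.Provider import DeepInfra

# After this commit:
from g4f.Provider.needs_auth import DeepInfra

print(DeepInfra.default_model)     # meta-llama/Meta-Llama-3.1-70B-Instruct
print(DeepInfra.needs_auth)        # True: an API key is still required
print(DeepInfra.get_models()[:3])  # fetches featured text-generation models over HTTP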