Diffstat (limited to 'g4f/Provider')
-rw-r--r-- | g4f/Provider/FlowGpt.py | 2
-rw-r--r-- | g4f/Provider/HuggingChat.py | 3
-rw-r--r-- | g4f/Provider/Llama.py (renamed from g4f/Provider/Llama2.py) | 8
-rw-r--r-- | g4f/Provider/PerplexityLabs.py | 7
-rw-r--r-- | g4f/Provider/__init__.py | 2
5 files changed, 12 insertions, 10 deletions
diff --git a/g4f/Provider/FlowGpt.py b/g4f/Provider/FlowGpt.py
index 7edd6f19..6c2aa046 100644
--- a/g4f/Provider/FlowGpt.py
+++ b/g4f/Provider/FlowGpt.py
@@ -99,4 +99,4 @@ class FlowGpt(AsyncGeneratorProvider, ProviderModelMixin):
                     if "event" not in message:
                         continue
                     if message["event"] == "text":
-                        yield message["data"]
\ No newline at end of file
+                        yield message["data"]
diff --git a/g4f/Provider/HuggingChat.py b/g4f/Provider/HuggingChat.py
index 882edb78..668ce4b1 100644
--- a/g4f/Provider/HuggingChat.py
+++ b/g4f/Provider/HuggingChat.py
@@ -19,7 +19,8 @@ class HuggingChat(AsyncGeneratorProvider, ProviderModelMixin):
         'mistralai/Mixtral-8x7B-Instruct-v0.1',
         'google/gemma-1.1-7b-it',
         'NousResearch/Nous-Hermes-2-Mixtral-8x7B-DPO',
-        'mistralai/Mistral-7B-Instruct-v0.2'
+        'mistralai/Mistral-7B-Instruct-v0.2',
+        'meta-llama/Meta-Llama-3-70B-Instruct'
     ]
     model_aliases = {
         "openchat/openchat_3.5": "openchat/openchat-3.5-0106",
diff --git a/g4f/Provider/Llama2.py b/g4f/Provider/Llama.py
index 04b5aee0..8f3e9ea2 100644
--- a/g4f/Provider/Llama2.py
+++ b/g4f/Provider/Llama.py
@@ -7,17 +7,21 @@ from ..requests.raise_for_status import raise_for_status
 from .base_provider import AsyncGeneratorProvider, ProviderModelMixin


-class Llama2(AsyncGeneratorProvider, ProviderModelMixin):
+class Llama(AsyncGeneratorProvider, ProviderModelMixin):
     url = "https://www.llama2.ai"
     working = True
     supports_message_history = True
-    default_model = "meta/llama-2-70b-chat"
+    default_model = "meta/llama-3-70b-chat"
     models = [
         "meta/llama-2-7b-chat",
         "meta/llama-2-13b-chat",
         "meta/llama-2-70b-chat",
+        "meta/meta-llama-3-8b-instruct",
+        "meta/meta-llama-3-70b-instruct",
     ]
     model_aliases = {
+        "meta-llama/Meta-Llama-3-8b-instruct": "meta/meta-llama-3-8b-instruct",
+        "meta-llama/Meta-Llama-3-70b-instruct": "meta/meta-llama-3-70b-instruct",
         "meta-llama/Llama-2-7b-chat-hf": "meta/llama-2-7b-chat",
         "meta-llama/Llama-2-13b-chat-hf": "meta/llama-2-13b-chat",
         "meta-llama/Llama-2-70b-chat-hf": "meta/llama-2-70b-chat",
diff --git a/g4f/Provider/PerplexityLabs.py b/g4f/Provider/PerplexityLabs.py
index ab36d284..b6fec53c 100644
--- a/g4f/Provider/PerplexityLabs.py
+++ b/g4f/Provider/PerplexityLabs.py
@@ -15,10 +15,7 @@ class PerplexityLabs(AsyncGeneratorProvider, ProviderModelMixin):
     working = True
     default_model = "mixtral-8x7b-instruct"
     models = [
-        "sonar-small-online", "sonar-medium-online", "sonar-small-chat", "sonar-medium-chat", "mistral-7b-instruct",
-        "codellama-70b-instruct", "llava-v1.5-7b-wrapper", "llava-v1.6-34b", "mixtral-8x7b-instruct",
-        "gemma-2b-it", "gemma-7b-it"
-        "mistral-medium", "related", "dbrx-instruct"
+        "sonar-small-online", "sonar-medium-online", "sonar-small-chat", "sonar-medium-chat", "dbrx-instruct", "claude-3-haiku-20240307", "llama-3-8b-instruct", "llama-3-70b-instruct", "codellama-70b-instruct", "mistral-7b-instruct", "llava-v1.5-7b-wrapper", "llava-v1.6-34b", "mixtral-8x7b-instruct", "mixtral-8x22b-instruct", "mistral-medium", "gemma-2b-it", "gemma-7b-it", "related"
     ]
     model_aliases = {
         "mistralai/Mistral-7B-Instruct-v0.1": "mistral-7b-instruct",
@@ -93,4 +90,4 @@ class PerplexityLabs(AsyncGeneratorProvider, ProviderModelMixin):
                 if data["final"]:
                     break
             except:
-                raise RuntimeError(f"Message: {message}")
\ No newline at end of file
+                raise RuntimeError(f"Message: {message}")
diff --git a/g4f/Provider/__init__.py b/g4f/Provider/__init__.py
index d5913e3c..f761df5b 100644
--- a/g4f/Provider/__init__.py
+++ b/g4f/Provider/__init__.py
@@ -40,7 +40,7 @@ from .HuggingChat import HuggingChat
 from .HuggingFace import HuggingFace
 from .Koala import Koala
 from .Liaobots import Liaobots
-from .Llama2 import Llama2
+from .Llama import Llama
 from .Local import Local
 from .PerplexityLabs import PerplexityLabs
 from .Pi import Pi
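
For downstream code, the visible impact of this commit is the rename of the Llama2 provider module and class to Llama, two new Llama 3 model IDs, and a changed default model. The following is a minimal usage sketch, assuming g4f's standard ChatCompletion.create entry point (the prompt text is illustrative; everything else comes from this diff):

    # Minimal sketch: calling the renamed provider after this commit.
    # Note: `from g4f.Provider import Llama2` no longer works; the import
    # below reflects the renamed module/class registered in __init__.py.
    import g4f
    from g4f.Provider import Llama

    # The alias key resolves to "meta/meta-llama-3-70b-instruct" via the
    # model_aliases mapping added in g4f/Provider/Llama.py.
    response = g4f.ChatCompletion.create(
        model="meta-llama/Meta-Llama-3-70b-instruct",
        provider=Llama,
        messages=[{"role": "user", "content": "Say hello in one sentence."}],
    )
    print(response)

Callers that pass no explicit model now get "meta/llama-3-70b-chat" rather than "meta/llama-2-70b-chat", since default_model changed in the same hunk.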