Diffstat (limited to 'g4f/Provider/Llama.py')
-rw-r--r-- | g4f/Provider/Llama.py | 6
1 file changed, 3 insertions(+), 3 deletions(-)
diff --git a/g4f/Provider/Llama.py b/g4f/Provider/Llama.py
index 8f3e9ea2..f2c78b36 100644
--- a/g4f/Provider/Llama.py
+++ b/g4f/Provider/Llama.py
@@ -11,7 +11,7 @@ class Llama(AsyncGeneratorProvider, ProviderModelMixin):
     url = "https://www.llama2.ai"
     working = True
     supports_message_history = True
-    default_model = "meta/llama-3-70b-chat"
+    default_model = "meta/meta-llama-3-70b-instruct"
     models = [
         "meta/llama-2-7b-chat",
         "meta/llama-2-13b-chat",
@@ -20,8 +20,8 @@ class Llama(AsyncGeneratorProvider, ProviderModelMixin):
         "meta/meta-llama-3-70b-instruct",
     ]
     model_aliases = {
-        "meta-llama/Meta-Llama-3-8b-instruct": "meta/meta-llama-3-8b-instruct",
-        "meta-llama/Meta-Llama-3-70b-instruct": "meta/meta-llama-3-70b-instruct",
+        "meta-llama/Meta-Llama-3-8B-Instruct": "meta/meta-llama-3-8b-instruct",
+        "meta-llama/Meta-Llama-3-70B-Instruct": "meta/meta-llama-3-70b-instruct",
         "meta-llama/Llama-2-7b-chat-hf": "meta/llama-2-7b-chat",
         "meta-llama/Llama-2-13b-chat-hf": "meta/llama-2-13b-chat",
         "meta-llama/Llama-2-70b-chat-hf": "meta/llama-2-70b-chat",
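For context, the commit does two things: it changes the provider's default to the Llama 3 70B instruct backend id, and it corrects the alias keys so they match the canonical Hugging Face casing (upper-case B and Instruct), which is how callers typically spell those model names. The sketch below is a minimal, standalone illustration of how such an alias table resolves a requested model name; resolve_model and its fallback logic are assumptions made for illustration, not g4f's actual ProviderModelMixin implementation.

from typing import Optional

# Minimal, self-contained sketch of alias resolution (illustrative only;
# g4f's ProviderModelMixin may implement this lookup differently).

DEFAULT_MODEL = "meta/meta-llama-3-70b-instruct"

# Keys are the canonical Hugging Face ids; values are the backend ids.
MODEL_ALIASES = {
    "meta-llama/Meta-Llama-3-8B-Instruct": "meta/meta-llama-3-8b-instruct",
    "meta-llama/Meta-Llama-3-70B-Instruct": "meta/meta-llama-3-70b-instruct",
    "meta-llama/Llama-2-7b-chat-hf": "meta/llama-2-7b-chat",
    "meta-llama/Llama-2-13b-chat-hf": "meta/llama-2-13b-chat",
    "meta-llama/Llama-2-70b-chat-hf": "meta/llama-2-70b-chat",
}

def resolve_model(model: Optional[str]) -> str:
    """Map a requested model name to the id the backend expects."""
    if not model:
        return DEFAULT_MODEL                # nothing requested: fall back to the new default
    if model in MODEL_ALIASES.values():
        return model                        # already a backend id, pass it through
    if model in MODEL_ALIASES:
        return MODEL_ALIASES[model]         # Hugging Face id: translate via the alias table
    raise ValueError(f"Unsupported model: {model}")

if __name__ == "__main__":
    # The case-corrected alias keys now match the ids users copy from Hugging Face.
    print(resolve_model("meta-llama/Meta-Llama-3-70B-Instruct"))  # meta/meta-llama-3-70b-instruct
    print(resolve_model(None))                                    # meta/meta-llama-3-70b-instruct

Without the casing fix, a request for "meta-llama/Meta-Llama-3-70B-Instruct" would miss the alias table and fail, even though the provider supports that model under its backend id.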