path: root/g4f/models.py
author    kqlio67 <kqlio67@users.noreply.github.com>    2024-09-29 22:38:25 +0200
committer kqlio67 <kqlio67@users.noreply.github.com>    2024-09-29 22:38:25 +0200
commit    58db9e03f0a25613cf90cf8184ebf159123e7477 (patch)
tree      e926d6f5551b4eb069e35b41479275056999e6c9 /g4f/models.py
parent    docs/providers-and-models.md (diff)
Diffstat
 g4f/models.py | 32 ++++++++++++++++----------------
 1 file changed, 16 insertions(+), 16 deletions(-)
diff --git a/g4f/models.py b/g4f/models.py
index 8a8d4e18..2940b96a 100644
--- a/g4f/models.py
+++ b/g4f/models.py
@@ -115,7 +115,7 @@ gpt_4o = Model(
     name = 'gpt-4o',
     base_provider = 'OpenAI',
     best_provider = IterListProvider([
-        Liaobots, Airforce, Chatgpt4o, ChatGptEs,
+        Liaobots, Nexra, Airforce, Chatgpt4o, ChatGptEs,
         OpenaiChat
     ])
 )
@@ -211,7 +211,7 @@ llama_3_1_405b = Model(
 llama_3_1 = Model(
     name = "llama-3.1",
     base_provider = "Meta Llama",
-    best_provider = IterListProvider([llama_3_1_8b.best_provider, llama_3_1_70b.best_provider, llama_3_1_405b.best_provider,])
+    best_provider = IterListProvider([Nexra, llama_3_1_8b.best_provider, llama_3_1_70b.best_provider, llama_3_1_405b.best_provider,])
 )
@@ -273,7 +273,7 @@ phi_3_5_mini = Model(
 gemini_pro = Model(
     name = 'gemini-pro',
     base_provider = 'Google DeepMind',
-    best_provider = IterListProvider([GeminiPro, LiteIcoding, Blackbox, AIChatFree, GPROChat, Liaobots, Airforce])
+    best_provider = IterListProvider([GeminiPro, LiteIcoding, Blackbox, AIChatFree, GPROChat, Nexra, Liaobots, Airforce])
 )

 gemini_flash = Model(
@@ -285,10 +285,7 @@ gemini_flash = Model(
 gemini = Model(
     name = 'gemini',
     base_provider = 'Google DeepMind',
-    best_provider = IterListProvider([
-        Gemini,
-        gemini_flash.best_provider, gemini_pro.best_provider
-    ])
+    best_provider = IterListProvider([Gemini, gemini_flash.best_provider, gemini_pro.best_provider])
 )

 # gemma
@@ -458,9 +455,7 @@ qwen_turbo = Model(
 qwen = Model(
     name = 'qwen',
     base_provider = 'Qwen',
-    best_provider = IterListProvider([
-        qwen_1_5_14b.best_provider, qwen_1_5_72b.best_provider, qwen_1_5_110b.best_provider, qwen_2_72b.best_provider, qwen_turbo.best_provider
-    ])
+    best_provider = IterListProvider([Nexra, qwen_1_5_14b.best_provider, qwen_1_5_72b.best_provider, qwen_1_5_110b.best_provider, qwen_2_72b.best_provider, qwen_turbo.best_provider])
 )
@@ -639,7 +634,7 @@ sonar_chat = Model(
 sdxl = Model(
     name = 'sdxl',
     base_provider = 'Stability AI',
-    best_provider = IterListProvider([ReplicateHome, DeepInfraImage])
+    best_provider = IterListProvider([ReplicateHome, Nexra, DeepInfraImage])
 )
@@ -734,10 +729,7 @@ dalle_3 = Model(
 dalle = Model(
     name = 'dalle',
     base_provider = '',
-    best_provider = IterListProvider([
-        Nexra,
-        dalle_2.best_provider, dalle_3.best_provider,
-    ])
+    best_provider = IterListProvider([Nexra, dalle_2.best_provider, dalle_3.best_provider])
 )
@@ -748,7 +740,7 @@ dalle_mini = Model(
 )

-### ###
+### Other ###
 emi = Model(
     name = 'emi',
     base_provider = '',
@@ -763,6 +755,13 @@ any_dark = Model(
 )

+prodia = Model(
+    name = 'prodia',
+    base_provider = '',
+    best_provider = IterListProvider([Nexra])
+
+)
+
 class ModelUtils:
     """
     Utility class for mapping string identifiers to Model instances.
@@ -985,6 +984,7 @@ class ModelUtils:
         'dalle-mini': dalle_mini,
         'emi': emi,
         'any-dark': any_dark,
+        'prodia': prodia,
     }

 _all_models = list(ModelUtils.convert.keys())
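
For context, a minimal sketch of how the aliases touched by this commit resolve through ModelUtils.convert. Only ModelUtils.convert, _all_models and the Model fields are taken from the diff above; the g4f.ChatCompletion.create call reflects g4f's documented client interface at the time and may differ between versions, so treat it as an illustration rather than part of the change.

# Minimal usage sketch, assuming the g4f package from this repository is installed.
import g4f
from g4f.models import ModelUtils, _all_models

# The commit registers the new 'prodia' alias, so string lookups now resolve
# to the Model instance defined above.
prodia = ModelUtils.convert['prodia']
print(prodia.name, prodia.base_provider, prodia.best_provider)
print('prodia' in _all_models)  # True once this commit is applied

# Models extended in this commit (e.g. gpt-4o now also lists Nexra) are
# addressed through the same string aliases.
response = g4f.ChatCompletion.create(
    model='gpt-4o',
    messages=[{"role": "user", "content": "Hello"}],
)
print(response)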