author     kqlio67 <kqlio67@users.noreply.github.com>   2024-11-04 14:37:04 +0100
committer  kqlio67 <kqlio67@users.noreply.github.com>   2024-11-04 14:37:04 +0100
commit     8c7791aae38ef364182fc8676d2e7349f9341a4c (patch)
tree       226fe36a867beb214b66e54fd35a3b5ae10a7aba /g4f/models.py
parent     Merge pull request #3 from rkihacker/main (diff)
Diffstat (limited to 'g4f/models.py')
-rw-r--r--  g4f/models.py  30
1 file changed, 11 insertions, 19 deletions
diff --git a/g4f/models.py b/g4f/models.py
index 944c4e9c..8b258201 100644
--- a/g4f/models.py
+++ b/g4f/models.py
@@ -137,13 +137,13 @@ gpt_35_turbo = Model(
gpt_4o = Model(
name = 'gpt-4o',
base_provider = 'OpenAI',
- best_provider = IterListProvider([NexraChatGPT, Blackbox, ChatGptEs, DarkAI, Editee, GizAI, Airforce, Liaobots, OpenaiChat])
+ best_provider = IterListProvider([Blackbox, ChatGptEs, DarkAI, Editee, NexraChatGPT, Airforce, Liaobots, OpenaiChat])
)
gpt_4o_mini = Model(
name = 'gpt-4o-mini',
base_provider = 'OpenAI',
- best_provider = IterListProvider([DDG, ChatGptEs, FreeNetfly, Pizzagpt, MagickPen, RubiksAI, Liaobots, Airforce, GizAI, ChatgptFree, Koala, OpenaiChat, ChatGpt])
+ best_provider = IterListProvider([DDG, ChatGptEs, FreeNetfly, Pizzagpt, MagickPen, RubiksAI, Liaobots, Airforce, ChatgptFree, Koala, OpenaiChat, ChatGpt])
)
gpt_4_turbo = Model(
@@ -168,7 +168,7 @@ o1 = Model(
o1_mini = Model(
name = 'o1-mini',
base_provider = 'OpenAI',
- best_provider = IterListProvider([GizAI])
+ best_provider = None
)
@@ -217,13 +217,13 @@ llama_3_70b = Model(
llama_3_1_8b = Model(
name = "llama-3.1-8b",
base_provider = "Meta Llama",
- best_provider = IterListProvider([Blackbox, DeepInfraChat, ChatHub, Cloudflare, GizAI, Airforce, PerplexityLabs])
+ best_provider = IterListProvider([Blackbox, DeepInfraChat, ChatHub, Cloudflare, Airforce, PerplexityLabs])
)
llama_3_1_70b = Model(
name = "llama-3.1-70b",
base_provider = "Meta Llama",
- best_provider = IterListProvider([DDG, HuggingChat, Blackbox, FreeGpt, TeachAnything, Free2GPT, DeepInfraChat, DarkAI, AiMathGPT, RubiksAI, GizAI, Airforce, HuggingFace, PerplexityLabs])
+ best_provider = IterListProvider([DDG, HuggingChat, Blackbox, FreeGpt, TeachAnything, Free2GPT, DeepInfraChat, DarkAI, AiMathGPT, RubiksAI, Airforce, HuggingFace, PerplexityLabs])
)
llama_3_1_405b = Model(
@@ -312,7 +312,7 @@ mistral_nemo = Model(
mistral_large = Model(
name = "mistral-large",
base_provider = "Mistral",
- best_provider = IterListProvider([Editee, GizAI])
+ best_provider = IterListProvider([Editee])
)
@@ -360,7 +360,7 @@ phi_3_5_mini = Model(
gemini_pro = Model(
name = 'gemini-pro',
base_provider = 'Google DeepMind',
- best_provider = IterListProvider([GeminiPro, Blackbox, AIChatFree, GPROChat, NexraGeminiPro, Editee, GizAI, Airforce, Liaobots])
+ best_provider = IterListProvider([GeminiPro, Blackbox, AIChatFree, GPROChat, NexraGeminiPro, Editee, Airforce, Liaobots])
)
gemini_flash = Model(
@@ -431,14 +431,14 @@ claude_3_sonnet = Model(
claude_3_haiku = Model(
name = 'claude-3-haiku',
base_provider = 'Anthropic',
- best_provider = IterListProvider([DDG, GizAI, Liaobots])
+ best_provider = IterListProvider([DDG, Liaobots])
)
# claude 3.5
claude_3_5_sonnet = Model(
name = 'claude-3.5-sonnet',
base_provider = 'Anthropic',
- best_provider = IterListProvider([Blackbox, Editee, GizAI, Liaobots])
+ best_provider = IterListProvider([Blackbox, Editee, Liaobots])
)
@@ -777,7 +777,7 @@ sdxl = Model(
sd_1_5 = Model(
name = 'sd-1.5',
base_provider = 'Stability AI',
- best_provider = IterListProvider([NexraSD15, GizAI])
+ best_provider = IterListProvider([NexraSD15])
)
@@ -788,13 +788,6 @@ sd_3 = Model(
)
-sd_3_5 = Model(
- name = 'sd-3.5',
- base_provider = 'Stability AI',
- best_provider = GizAI
-
-)
-
### Playground ###
playground_v2_5 = Model(
name = 'playground-v2.5',
@@ -864,7 +857,7 @@ flux_4o = Model(
flux_schnell = Model(
name = 'flux-schnell',
base_provider = 'Flux AI',
- best_provider = IterListProvider([ReplicateHome, GizAI])
+ best_provider = IterListProvider([ReplicateHome])
)
@@ -1156,7 +1149,6 @@ class ModelUtils:
'sdxl-turbo': sdxl_turbo,
'sd-1.5': sd_1_5,
'sd-3': sd_3,
-'sd-3.5': sd_3_5,
### Playground ###
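
The recurring pattern edited throughout this commit is a Model entry whose best_provider is an IterListProvider fallback list: dropping a provider such as GizAI shortens that list, and setting best_provider = None (as for o1-mini) leaves the model with no default provider. The sketch below is a hedged reconstruction of that pattern, assuming IterListProvider tries its providers in order; the create() signature and the provider callables are hypothetical stand-ins, not the real g4f API.

# Minimal sketch of the Model / IterListProvider pattern seen in the diff above.
# Assumption-based reconstruction, not the actual g4f implementation.
from dataclasses import dataclass
from typing import Callable, List, Optional


class IterListProvider:
    """Assumed behaviour: try each provider in order until one succeeds."""

    def __init__(self, providers: List[Callable[[str], str]]):
        self.providers = providers

    def create(self, prompt: str) -> str:
        for provider in self.providers:
            try:
                return provider(prompt)   # first provider that answers wins
            except Exception:
                continue                  # fall through to the next provider
        raise RuntimeError("all providers failed")


@dataclass
class Model:
    name: str
    base_provider: str
    best_provider: Optional[IterListProvider] = None


# After this commit, o1-mini keeps its definition but carries no provider list,
# so a caller would have to pick a provider for it explicitly.
o1_mini = Model(name="o1-mini", base_provider="OpenAI", best_provider=None)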