author     Tekky <98614666+xtekky@users.noreply.github.com>  2024-10-22 23:32:27 +0200
committer  GitHub <noreply@github.com>                       2024-10-22 23:32:27 +0200
commit     a63c18de796bd4f3e818ff170b6ff595304f95e0 (patch)
tree       844dbb9a8d3526a8b60564b78f7a19a4e0f605d9 /g4f/models.py
parent     Merge pull request #2282 from Karasiq/patch-1 (diff)
parent     Updated docs/providers-and-models.md g4f/models.py g4f/Provider/Upstage.py (diff)
Diffstat (limited to 'g4f/models.py')
-rw-r--r--  g4f/models.py  65

1 file changed, 43 insertions(+), 22 deletions(-)
diff --git a/g4f/models.py b/g4f/models.py
index e84f9103..1cea6447 100644
--- a/g4f/models.py
+++ b/g4f/models.py
@@ -1,4 +1,3 @@
-# g4f/models.py
from __future__ import annotations
from dataclasses import dataclass
@@ -40,6 +39,7 @@ from .Provider import (
Liaobots,
MagickPen,
MetaAI,
+ NexraBing,
NexraBlackbox,
NexraChatGPT,
NexraChatGPT4o,
@@ -47,11 +47,14 @@ from .Provider import (
NexraChatGptWeb,
NexraDallE,
NexraDallE2,
- NexraDalleMini,
NexraEmi,
NexraFluxPro,
- NexraLLaMA31,
+ NexraGeminiPro,
+ NexraMidjourney,
NexraQwen,
+ NexraSD15,
+ NexraSDLora,
+ NexraSDTurbo,
OpenaiChat,
PerplexityLabs,
Pi,
@@ -105,7 +108,6 @@ default = Model(
AmigoChat,
ChatifyAI,
Cloudflare,
- Ai4Chat,
Editee,
AiMathGPT,
])
@@ -152,7 +154,7 @@ gpt_4_turbo = Model(
gpt_4 = Model(
name = 'gpt-4',
base_provider = 'OpenAI',
- best_provider = IterListProvider([NexraChatGPT, NexraChatGptV2, NexraChatGptWeb, Ai4Chat, Airforce, Chatgpt4Online, Bing, OpenaiChat])
+ best_provider = IterListProvider([Chatgpt4Online, Ai4Chat, NexraBing, NexraChatGPT, NexraChatGptV2, NexraChatGptWeb, Airforce, Bing, OpenaiChat, gpt_4_turbo.best_provider, gpt_4o.best_provider, gpt_4o_mini.best_provider])
)
# o1
@@ -214,7 +216,7 @@ llama_3_70b = Model(
llama_3_1_8b = Model(
name = "llama-3.1-8b",
base_provider = "Meta Llama",
- best_provider = IterListProvider([Blackbox, DeepInfraChat, ChatHub, Cloudflare, NexraLLaMA31, Airforce, PerplexityLabs])
+ best_provider = IterListProvider([Blackbox, DeepInfraChat, ChatHub, Cloudflare, Airforce, PerplexityLabs])
)
llama_3_1_70b = Model(
@@ -345,7 +347,7 @@ phi_3_5_mini = Model(
gemini_pro = Model(
name = 'gemini-pro',
base_provider = 'Google DeepMind',
- best_provider = IterListProvider([GeminiPro, Blackbox, AIChatFree, GPROChat, AmigoChat, Editee, Liaobots, Airforce])
+ best_provider = IterListProvider([GeminiPro, Blackbox, AIChatFree, GPROChat, NexraGeminiPro, AmigoChat, Editee, Liaobots, Airforce])
)
gemini_flash = Model(
@@ -734,6 +736,20 @@ nemotron_70b = Model(
#############
### Stability AI ###
+sdxl_turbo = Model(
+ name = 'sdxl-turbo',
+ base_provider = 'Stability AI',
+ best_provider = NexraSDTurbo
+
+)
+
+sdxl_lora = Model(
+ name = 'sdxl-lora',
+ base_provider = 'Stability AI',
+ best_provider = NexraSDLora
+
+)
+
sdxl = Model(
name = 'sdxl',
base_provider = 'Stability AI',
@@ -741,6 +757,13 @@ sdxl = Model(
)
+sd_1_5 = Model(
+ name = 'sd-1.5',
+ base_provider = 'Stability AI',
+ best_provider = NexraSD15
+
+)
+
sd_3 = Model(
name = 'sd-3',
base_provider = 'Stability AI',
@@ -768,7 +791,7 @@ flux = Model(
flux_pro = Model(
name = 'flux-pro',
base_provider = 'Flux AI',
- best_provider = IterListProvider([NexraFluxPro, AmigoChat])
+ best_provider = IterListProvider([AmigoChat, NexraFluxPro])
)
@@ -829,12 +852,6 @@ dalle_2 = Model(
best_provider = NexraDallE2
)
-dalle_3 = Model(
- name = 'dalle-3',
- base_provider = 'OpenAI',
- best_provider = Airforce
-
-)
dalle = Model(
name = 'dalle',
@@ -843,14 +860,14 @@ dalle = Model(
)
-dalle_mini = Model(
- name = 'dalle-mini',
- base_provider = 'OpenAI',
- best_provider = NexraDalleMini
+### Midjourney ###
+midjourney = Model(
+ name = 'midjourney',
+ base_provider = 'Midjourney',
+ best_provider = NexraMidjourney
)
-
### Other ###
emi = Model(
name = 'emi',
@@ -1019,7 +1036,7 @@ class ModelUtils:
### Upstage ###
-'solar-1-mini': solar_1_mini,
+'solar-mini': solar_1_mini,
'solar-10-7b': solar_10_7b,
'solar-pro': solar_pro,
@@ -1102,6 +1119,9 @@ class ModelUtils:
### Stability AI ###
'sdxl': sdxl,
+'sdxl-lora': sdxl_lora,
+'sdxl-turbo': sdxl_turbo,
+'sd-1.5': sd_1_5,
'sd-3': sd_3,
@@ -1124,8 +1144,9 @@ class ModelUtils:
### OpenAI ###
'dalle': dalle,
'dalle-2': dalle_2,
-'dalle-3': dalle_3,
-'dalle-mini': dalle_mini,
+
+### Midjourney ###
+'midjourney': midjourney,
### Other ###
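
As a quick illustration (not part of the commit), the aliases this change adds to the ModelUtils registry can be resolved back to their Model entries. A minimal sketch in Python, assuming the alias-to-Model mapping is exposed as ModelUtils.convert, as elsewhere in g4f/models.py around this commit:

    # Minimal sketch, not part of the diff: look up the aliases registered above.
    # Assumes ModelUtils exposes its alias-to-Model mapping as `convert`.
    from g4f.models import ModelUtils

    for alias in ('sdxl-turbo', 'sdxl-lora', 'sd-1.5', 'midjourney'):
        model = ModelUtils.convert[alias]
        print(alias, '->', model.name, '/', model.base_provider)

Each alias maps to the Model instance declared in the diff; for example, 'sdxl-turbo' resolves to the Stability AI entry backed by NexraSDTurbo, and 'midjourney' to the new Midjourney entry backed by NexraMidjourney.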