Diffstat (limited to 'g4f/models.py')
-rw-r--r--  g4f/models.py  844
1 file changed, 574 insertions(+), 270 deletions(-)
diff --git a/g4f/models.py b/g4f/models.py
index e9016561..877f6d42 100644
--- a/g4f/models.py
+++ b/g4f/models.py
@@ -4,36 +4,50 @@ from dataclasses import dataclass
from .Provider import IterListProvider, ProviderType
from .Provider import (
- AI365VIP,
- Bing,
- Blackbox,
- Chatgpt4o,
- ChatgptFree,
- DDG,
- DeepInfra,
- DeepInfraImage,
- FreeChatgpt,
- FreeGpt,
- Gemini,
- GeminiPro,
- GeminiProChat,
- GigaChat,
- HuggingChat,
- HuggingFace,
- Koala,
- Liaobots,
- MetaAI,
- OpenaiChat,
- PerplexityLabs,
- Pi,
- Pizzagpt,
- Reka,
- Replicate,
- ReplicateHome,
- Vercel,
- You,
+ AiChatOnline,
+ Airforce,
+ Allyfy,
+ Bing,
+ Binjie,
+ Bixin123,
+ Blackbox,
+ ChatGot,
+ Chatgpt4Online,
+ Chatgpt4o,
+ ChatgptFree,
+ CodeNews,
+ DDG,
+ DeepInfra,
+ DeepInfraImage,
+ Free2GPT,
+ FreeChatgpt,
+ FreeGpt,
+ FreeNetfly,
+ Gemini,
+ GeminiPro,
+ GigaChat,
+ HuggingChat,
+ HuggingFace,
+ Koala,
+ Liaobots,
+ MagickPen,
+ MetaAI,
+ Nexra,
+ OpenaiChat,
+ PerplexityLabs,
+ Pi,
+ Pizzagpt,
+ Reka,
+ Replicate,
+ ReplicateHome,
+ Snova,
+ TeachAnything,
+ TwitterBio,
+ Upstage,
+ You,
)
+
@dataclass(unsafe_hash=True)
class Model:
"""
@@ -57,33 +71,18 @@ default = Model(
name = "",
base_provider = "",
best_provider = IterListProvider([
- Bing,
- You,
- OpenaiChat,
- FreeChatgpt,
- AI365VIP,
- Chatgpt4o,
DDG,
- ChatgptFree,
- Koala,
- Pizzagpt,
- ])
-)
-
-# GPT-3.5 too, but all providers supports long requests and responses
-gpt_35_long = Model(
- name = 'gpt-3.5-turbo',
- base_provider = 'openai',
- best_provider = IterListProvider([
- FreeGpt,
- You,
- OpenaiChat,
- Koala,
- ChatgptFree,
FreeChatgpt,
- DDG,
- AI365VIP,
+ HuggingChat,
Pizzagpt,
+ ChatgptFree,
+ ReplicateHome,
+ Upstage,
+ Blackbox,
+ Bixin123,
+ Binjie,
+ Free2GPT,
+ MagickPen,
])
)
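
The reworked default model keeps an empty name and delegates to an IterListProvider fallback chain, which tries the listed providers until one succeeds. A minimal usage sketch, assuming g4f.ChatCompletion.create accepts a Model instance as its model argument:

# Minimal sketch, assuming ChatCompletion.create accepts a Model instance.
import g4f
from g4f.models import default

response = g4f.ChatCompletion.create(
    model=default,  # empty-name fallback model: routed through the IterListProvider chain above
    messages=[{"role": "user", "content": "Hello"}],
)
print(response)
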
@@ -92,84 +91,60 @@ gpt_35_long = Model(
############
### OpenAI ###
-### GPT-3.5 / GPT-4 ###
+# gpt-3
+gpt_3 = Model(
+ name = 'gpt-3',
+ base_provider = 'OpenAI',
+ best_provider = IterListProvider([
+ Nexra,
+ ])
+)
+
# gpt-3.5
gpt_35_turbo = Model(
name = 'gpt-3.5-turbo',
- base_provider = 'openai',
+ base_provider = 'OpenAI',
best_provider = IterListProvider([
- FreeGpt,
- You,
- Koala,
- OpenaiChat,
- ChatgptFree,
- FreeChatgpt,
- DDG,
- AI365VIP,
- Pizzagpt,
+ Allyfy, TwitterBio, Nexra, Bixin123, CodeNews, Airforce,
])
)
-gpt_35_turbo_16k = Model(
- name = 'gpt-3.5-turbo-16k',
- base_provider = 'openai',
- best_provider = gpt_35_long.best_provider
-)
-
-gpt_35_turbo_16k_0613 = Model(
- name = 'gpt-3.5-turbo-16k-0613',
- base_provider = 'openai',
- best_provider = gpt_35_long.best_provider
-)
-
-gpt_35_turbo_0613 = Model(
- name = 'gpt-3.5-turbo-0613',
- base_provider = 'openai',
- best_provider = gpt_35_turbo.best_provider
-)
-
# gpt-4
-gpt_4 = Model(
- name = 'gpt-4',
- base_provider = 'openai',
+gpt_4o = Model(
+ name = 'gpt-4o',
+ base_provider = 'OpenAI',
best_provider = IterListProvider([
- Bing, Liaobots,
+ Liaobots, Chatgpt4o, Airforce,
+ OpenaiChat,
])
)
-gpt_4_0613 = Model(
- name = 'gpt-4-0613',
- base_provider = 'openai',
- best_provider = gpt_4.best_provider
-)
-
-gpt_4_32k = Model(
- name = 'gpt-4-32k',
- base_provider = 'openai',
- best_provider = gpt_4.best_provider
-)
-
-gpt_4_32k_0613 = Model(
- name = 'gpt-4-32k-0613',
- base_provider = 'openai',
- best_provider = gpt_4.best_provider
+gpt_4o_mini = Model(
+ name = 'gpt-4o-mini',
+ base_provider = 'OpenAI',
+ best_provider = IterListProvider([
+ DDG, Liaobots, You, FreeNetfly, Pizzagpt, ChatgptFree, AiChatOnline, CodeNews, MagickPen, Airforce,
+ OpenaiChat, Koala,
+ ])
)
gpt_4_turbo = Model(
name = 'gpt-4-turbo',
- base_provider = 'openai',
- best_provider = Bing
+ base_provider = 'OpenAI',
+ best_provider = IterListProvider([
+ Nexra, Bixin123, Liaobots, Airforce, Bing
+ ])
)
-gpt_4o = Model(
- name = 'gpt-4o',
- base_provider = 'openai',
+gpt_4 = Model(
+ name = 'gpt-4',
+ base_provider = 'OpenAI',
best_provider = IterListProvider([
- You, Liaobots, Chatgpt4o, AI365VIP
+ Chatgpt4Online, Nexra, Binjie, Airforce, Bing,
+ gpt_4_turbo.best_provider, gpt_4o.best_provider, gpt_4o_mini.best_provider
])
)
-
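
Note the composition pattern introduced for gpt_4 and reused later for gemini and glm_4: a model's provider list can nest other models' best_provider objects, which are themselves IterListProvider instances. A hedged sketch of flattening such a nested list for inspection, assuming IterListProvider keeps its members in a .providers attribute (a hypothetical helper, not part of this module):

# Hypothetical helper, not defined in g4f; assumes IterListProvider stores its members in .providers.
from g4f.Provider import IterListProvider

def iter_leaf_providers(providers):
    """Yield concrete provider classes from a list that may nest IterListProvider objects."""
    for provider in providers:
        if isinstance(provider, IterListProvider):
            yield from iter_leaf_providers(provider.providers)
        else:
            yield provider

# e.g.: sorted({p.__name__ for p in iter_leaf_providers(gpt_4.best_provider.providers)})
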
### GigaChat ###
gigachat = Model(
name = 'GigaChat:latest',
@@ -180,131 +155,174 @@ gigachat = Model(
### Meta ###
meta = Model(
- name = "meta",
- base_provider = "meta",
+ name = "meta-ai",
+ base_provider = "Meta",
best_provider = MetaAI
)
-llama_2_70b_chat = Model(
- name = "meta/llama-2-70b-chat",
- base_provider = "meta",
- best_provider = IterListProvider([ReplicateHome])
+llama_2_13b = Model(
+ name = "llama-2-13b",
+ base_provider = "Meta",
+ best_provider = IterListProvider([Airforce])
)
-llama3_8b_instruct = Model(
- name = "meta-llama/Meta-Llama-3-8B-Instruct",
- base_provider = "meta",
- best_provider = IterListProvider([DeepInfra, PerplexityLabs, Replicate])
+llama_3_8b = Model(
+ name = "llama-3-8b",
+ base_provider = "Meta",
+ best_provider = IterListProvider([Airforce, DeepInfra, Replicate])
)
-llama3_70b_instruct = Model(
- name = "meta-llama/Meta-Llama-3-70B-Instruct",
- base_provider = "meta",
- best_provider = IterListProvider([DeepInfra, PerplexityLabs, Replicate, HuggingChat, DDG])
+llama_3_70b = Model(
+ name = "llama-3-70b",
+ base_provider = "Meta",
+ best_provider = IterListProvider([ReplicateHome, Airforce, DeepInfra, Replicate])
)
-codellama_34b_instruct = Model(
- name = "codellama/CodeLlama-34b-Instruct-hf",
- base_provider = "meta",
- best_provider = HuggingChat
+llama_3_1_8b = Model(
+ name = "llama-3.1-8b",
+ base_provider = "Meta",
+ best_provider = IterListProvider([Blackbox, Airforce, PerplexityLabs])
)
-codellama_70b_instruct = Model(
- name = "codellama/CodeLlama-70b-Instruct-hf",
- base_provider = "meta",
- best_provider = IterListProvider([DeepInfra])
+llama_3_1_70b = Model(
+ name = "llama-3.1-70b",
+ base_provider = "Meta",
+ best_provider = IterListProvider([DDG, HuggingChat, FreeGpt, Blackbox, TeachAnything, Free2GPT, Airforce, HuggingFace, PerplexityLabs])
+)
+
+llama_3_1_405b = Model(
+ name = "llama-3.1-405b",
+ base_provider = "Meta",
+ best_provider = IterListProvider([Blackbox, Airforce])
)
### Mistral ###
+mistral_7b = Model(
+ name = "mistral-7b",
+ base_provider = "Mistral",
+ best_provider = IterListProvider([HuggingChat, Airforce, HuggingFace, DeepInfra])
+)
+
mixtral_8x7b = Model(
- name = "mistralai/Mixtral-8x7B-Instruct-v0.1",
- base_provider = "huggingface",
- best_provider = IterListProvider([DeepInfra, HuggingFace, PerplexityLabs, HuggingChat, DDG])
+ name = "mixtral-8x7b",
+ base_provider = "Mistral",
+ best_provider = IterListProvider([HuggingChat, DDG, ReplicateHome, TwitterBio, Airforce, DeepInfra, HuggingFace])
)
-mistral_7b_v02 = Model(
- name = "mistralai/Mistral-7B-Instruct-v0.2",
- base_provider = "huggingface",
- best_provider = IterListProvider([DeepInfra, HuggingFace, HuggingChat, ReplicateHome])
+mixtral_8x22b = Model(
+ name = "mixtral-8x22b",
+ base_provider = "Mistral",
+ best_provider = IterListProvider([Airforce])
)
### NousResearch ###
-Nous_Hermes_2_Mixtral_8x7B_DPO = Model(
- name = "NousResearch/Nous-Hermes-2-Mixtral-8x7B-DPO",
+mixtral_8x7b_dpo = Model(
+ name = "mixtral-8x7b-dpo",
base_provider = "NousResearch",
- best_provider = IterListProvider([HuggingFace, HuggingChat])
+ best_provider = IterListProvider([HuggingChat, Airforce, HuggingFace])
)
-
-### 01-ai ###
-Yi_1_5_34B_Chat = Model(
- name = "01-ai/Yi-1.5-34B-Chat",
- base_provider = "01-ai",
- best_provider = IterListProvider([HuggingFace, HuggingChat])
+yi_34b = Model(
+ name = 'yi-34b',
+ base_provider = 'NousResearch',
+ best_provider = IterListProvider([Airforce])
)
### Microsoft ###
-Phi_3_mini_4k_instruct = Model(
- name = "microsoft/Phi-3-mini-4k-instruct",
+phi_3_mini_4k = Model(
+ name = "phi-3-mini-4k",
base_provider = "Microsoft",
- best_provider = IterListProvider([HuggingFace, HuggingChat])
+ best_provider = IterListProvider([HuggingChat, HuggingFace])
)
### Google ###
# gemini
-gemini = Model(
- name = 'gemini',
+gemini_pro = Model(
+ name = 'gemini-pro',
base_provider = 'Google',
- best_provider = Gemini
+ best_provider = IterListProvider([GeminiPro, ChatGot, Liaobots, Airforce])
)
-gemini_pro = Model(
- name = 'gemini-pro',
+gemini_flash = Model(
+ name = 'gemini-flash',
+ base_provider = 'Google',
+ best_provider = IterListProvider([Blackbox, Liaobots, Airforce])
+)
+
+gemini = Model(
+ name = 'gemini',
base_provider = 'Google',
- best_provider = IterListProvider([GeminiPro, You, GeminiProChat])
+ best_provider = IterListProvider([
+ Gemini,
+ gemini_flash.best_provider, gemini_pro.best_provider
+ ])
)
+
# gemma
-gemma_2_9b_it = Model(
- name = 'gemma-2-9b-it',
+gemma_2b = Model(
+ name = 'gemma-2b',
base_provider = 'Google',
- best_provider = IterListProvider([PerplexityLabs])
+ best_provider = IterListProvider([ReplicateHome, Airforce])
)
-gemma_2_27b_it = Model(
- name = 'gemma-2-27b-it',
+gemma_2b_9b = Model(
+ name = 'gemma-2b-9b',
base_provider = 'Google',
- best_provider = IterListProvider([PerplexityLabs])
+ best_provider = IterListProvider([Airforce])
)
+gemma_2b_27b = Model(
+ name = 'gemma-2b-27b',
+ base_provider = 'Google',
+ best_provider = IterListProvider([Airforce])
+)
### Anthropic ###
-claude_v2 = Model(
- name = 'claude-v2',
- base_provider = 'anthropic',
- best_provider = IterListProvider([Vercel])
+claude_2 = Model(
+ name = 'claude-2',
+ base_provider = 'Anthropic',
+ best_provider = IterListProvider([You])
+)
+
+claude_2_0 = Model(
+ name = 'claude-2.0',
+ base_provider = 'Anthropic',
+ best_provider = IterListProvider([Liaobots])
+)
+
+claude_2_1 = Model(
+ name = 'claude-2.1',
+ base_provider = 'Anthropic',
+ best_provider = IterListProvider([Liaobots])
)
claude_3_opus = Model(
name = 'claude-3-opus',
- base_provider = 'anthropic',
- best_provider = You
+ base_provider = 'Anthropic',
+ best_provider = IterListProvider([Liaobots])
)
claude_3_sonnet = Model(
name = 'claude-3-sonnet',
- base_provider = 'anthropic',
- best_provider = You
+ base_provider = 'Anthropic',
+ best_provider = IterListProvider([Liaobots])
+)
+
+claude_3_5_sonnet = Model(
+ name = 'claude-3-5-sonnet',
+ base_provider = 'Anthropic',
+ best_provider = IterListProvider([Liaobots])
)
claude_3_haiku = Model(
name = 'claude-3-haiku',
- base_provider = 'anthropic',
- best_provider = IterListProvider([DDG, AI365VIP])
+ base_provider = 'Anthropic',
+ best_provider = IterListProvider([DDG, Liaobots])
)
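
The plain string names defined in this hunk (claude-2, claude-3-5-sonnet, and so on) are what callers pass as the model argument; a hedged sketch using the string form, assuming create() resolves it through ModelUtils.convert:

# Sketch; assumes create() resolves plain string ids via the ModelUtils.convert registry below.
import g4f

answer = g4f.ChatCompletion.create(
    model="claude-3-5-sonnet",
    messages=[{"role": "user", "content": "Summarize this diff in one line."}],
)
print(answer)
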
@@ -316,14 +334,6 @@ reka_core = Model(
)
-### NVIDIA ###
-nemotron_4_340b_instruct = Model(
- name = 'nemotron-4-340b-instruct',
- base_provider = 'NVIDIA',
- best_provider = IterListProvider([PerplexityLabs])
-)
-
-
### Blackbox ###
blackbox = Model(
name = 'blackbox',
@@ -334,27 +344,157 @@ blackbox = Model(
### Databricks ###
dbrx_instruct = Model(
- name = 'databricks/dbrx-instruct',
+ name = 'dbrx-instruct',
base_provider = 'Databricks',
- best_provider = IterListProvider([DeepInfra])
+ best_provider = IterListProvider([Airforce, DeepInfra])
)
### CohereForAI ###
command_r_plus = Model(
- name = 'CohereForAI/c4ai-command-r-plus',
+ name = 'command-r-plus',
base_provider = 'CohereForAI',
best_provider = IterListProvider([HuggingChat])
)
-### Other ###
+### iFlytek ###
+sparkdesk_v1_1 = Model(
+ name = 'sparkdesk-v1.1',
+ base_provider = 'iFlytek',
+ best_provider = IterListProvider([FreeChatgpt, Airforce])
+)
+
+### Qwen ###
+qwen_1_5_14b = Model(
+ name = 'qwen-1.5-14b',
+ base_provider = 'Qwen',
+ best_provider = IterListProvider([FreeChatgpt])
+)
+
+qwen_1_5_72b = Model(
+ name = 'qwen-1.5-72b',
+ base_provider = 'Qwen',
+ best_provider = IterListProvider([Airforce])
+)
+
+qwen_1_5_110b = Model(
+ name = 'qwen-1.5-110b',
+ base_provider = 'Qwen',
+ best_provider = IterListProvider([Airforce])
+)
+
+qwen_2_72b = Model(
+ name = 'qwen-2-72b',
+ base_provider = 'Qwen',
+ best_provider = IterListProvider([Airforce])
+)
+
+qwen_turbo = Model(
+ name = 'qwen-turbo',
+ base_provider = 'Qwen',
+ best_provider = IterListProvider([Bixin123])
+)
+
+
+### Zhipu AI ###
+glm_3_6b = Model(
+ name = 'glm-3-6b',
+ base_provider = 'Zhipu AI',
+ best_provider = IterListProvider([FreeChatgpt])
+)
+
+glm_4_9b = Model(
+ name = 'glm-4-9B',
+ base_provider = 'Zhipu AI',
+ best_provider = IterListProvider([FreeChatgpt])
+)
+
+glm_4 = Model(
+ name = 'glm-4',
+ base_provider = 'Zhipu AI',
+ best_provider = IterListProvider([
+ CodeNews,
+ glm_3_6b.best_provider, glm_4_9b.best_provider
+ ])
+)
+
+### 01-ai ###
+yi_1_5_9b = Model(
+ name = 'yi-1.5-9b',
+ base_provider = '01-ai',
+ best_provider = IterListProvider([FreeChatgpt])
+)
+
+### Upstage ###
+solar_1_mini = Model(
+ name = 'solar-1-mini',
+ base_provider = 'Upstage',
+ best_provider = IterListProvider([Upstage])
+)
+
+solar_10_7b = Model(
+ name = 'solar-10-7b',
+ base_provider = 'Upstage',
+ best_provider = Airforce
+)
+
+
+### Pi ###
pi = Model(
name = 'pi',
base_provider = 'inflection',
best_provider = Pi
)
+### SambaNova ###
+samba_coe_v0_1 = Model(
+ name = 'samba-coe-v0.1',
+ base_provider = 'SambaNova',
+ best_provider = Snova
+)
+
+### Trong-Hieu Nguyen-Mau ###
+v1olet_merged_7b = Model(
+ name = 'v1olet-merged-7b',
+ base_provider = 'Trong-Hieu Nguyen-Mau',
+ best_provider = Snova
+)
+
+### Macadeliccc ###
+westlake_7b_v2 = Model(
+ name = 'westlake-7b-v2',
+ base_provider = 'Macadeliccc',
+ best_provider = Snova
+)
+
+### DeepSeek ###
+deepseek = Model(
+ name = 'deepseek',
+ base_provider = 'DeepSeek',
+ best_provider = IterListProvider([CodeNews, Airforce])
+)
+
+### WizardLM ###
+wizardlm_2_8x22b = Model(
+ name = 'wizardlm-2-8x22b',
+ base_provider = 'WizardLM',
+ best_provider = Airforce
+)
+
+### Together ###
+sh_n_7b = Model(
+ name = 'sh-n-7b',
+ base_provider = 'Together',
+ best_provider = Airforce
+)
+
+### Yorickvp ###
+llava_13b = Model(
+ name = 'llava-13b',
+ base_provider = 'Yorickvp',
+ best_provider = ReplicateHome
+)
#############
### Image ###
@@ -362,20 +502,113 @@ pi = Model(
### Stability AI ###
sdxl = Model(
- name = 'stability-ai/sdxl',
+ name = 'sdxl',
base_provider = 'Stability AI',
best_provider = IterListProvider([ReplicateHome, DeepInfraImage])
)
-### AI Forever ###
-kandinsky_2_2 = Model(
- name = 'ai-forever/kandinsky-2.2',
- base_provider = 'AI Forever',
+sd_3 = Model(
+ name = 'sd-3',
+ base_provider = 'Stability AI',
+ best_provider = IterListProvider([ReplicateHome])
+
+)
+
+### Playground ###
+playground_v2_5 = Model(
+ name = 'playground-v2.5',
+ base_provider = 'Stability AI',
best_provider = IterListProvider([ReplicateHome])
)
+### Flux AI ###
+flux = Model(
+ name = 'flux',
+ base_provider = 'Flux AI',
+ best_provider = IterListProvider([Airforce])
+
+)
+
+flux_realism = Model(
+ name = 'flux-realism',
+ base_provider = 'Flux AI',
+ best_provider = IterListProvider([Airforce])
+
+)
+
+flux_anime = Model(
+ name = 'flux-anime',
+ base_provider = 'Flux AI',
+ best_provider = IterListProvider([Airforce])
+
+)
+
+flux_3d = Model(
+ name = 'flux-3d',
+ base_provider = 'Flux AI',
+ best_provider = IterListProvider([Airforce])
+
+)
+
+flux_disney = Model(
+ name = 'flux-disney',
+ base_provider = 'Flux AI',
+ best_provider = IterListProvider([Airforce])
+
+)
+
+flux_pixel = Model(
+ name = 'flux-pixel',
+ base_provider = 'Flux AI',
+ best_provider = IterListProvider([Airforce])
+
+)
+
+flux_schnell = Model(
+ name = 'flux-schnell',
+ base_provider = 'Flux AI',
+ best_provider = IterListProvider([ReplicateHome])
+
+)
+
+### ###
+dalle = Model(
+ name = 'dalle',
+ base_provider = '',
+ best_provider = IterListProvider([Nexra])
+
+)
+
+dalle_2 = Model(
+ name = 'dalle-2',
+ base_provider = '',
+ best_provider = IterListProvider([Nexra])
+
+)
+
+dalle_mini = Model(
+ name = 'dalle-mini',
+ base_provider = '',
+ best_provider = IterListProvider([Nexra])
+
+)
+
+emi = Model(
+ name = 'emi',
+ base_provider = '',
+ best_provider = IterListProvider([Nexra])
+
+)
+
+any_dark = Model(
+ name = 'any-dark',
+ base_provider = '',
+ best_provider = IterListProvider([Airforce])
+
+)
+
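
The image models added above (sdxl, sd-3, the flux variants, dalle, and so on) register through the same Model dataclass as the text models. A usage sketch, assuming g4f.client.Client exposes an images.generate endpoint as in recent g4f releases:

# Sketch only; assumes g4f.client.Client provides images.generate as in recent releases.
from g4f.client import Client

client = Client()
result = client.images.generate(
    model="flux",  # any image key from ModelUtils.convert below
    prompt="a watercolor fox",
)
print(result.data[0].url)
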
class ModelUtils:
"""
Utility class for mapping string identifiers to Model instances.
@@ -385,113 +618,184 @@ class ModelUtils:
"""
convert: dict[str, Model] = {
- ############
- ### Text ###
- ############
-
- ### OpenAI ###
- ### GPT-3.5 / GPT-4 ###
- # gpt-3.5
- 'gpt-3.5-turbo' : gpt_35_turbo,
- 'gpt-3.5-turbo-0613' : gpt_35_turbo_0613,
- 'gpt-3.5-turbo-16k' : gpt_35_turbo_16k,
- 'gpt-3.5-turbo-16k-0613' : gpt_35_turbo_16k_0613,
- 'gpt-3.5-long': gpt_35_long,
-
- # gpt-4
- 'gpt-4o' : gpt_4o,
- 'gpt-4' : gpt_4,
- 'gpt-4-0613' : gpt_4_0613,
- 'gpt-4-32k' : gpt_4_32k,
- 'gpt-4-32k-0613' : gpt_4_32k_0613,
- 'gpt-4-turbo' : gpt_4_turbo,
-
-
- ### Meta ###
- "meta-ai": meta,
+############
+### Text ###
+############
- 'llama-2-70b-chat': llama_2_70b_chat,
- 'llama3-8b': llama3_8b_instruct, # alias
- 'llama3-70b': llama3_70b_instruct, # alias
- 'llama3-8b-instruct' : llama3_8b_instruct,
- 'llama3-70b-instruct': llama3_70b_instruct,
+### OpenAI ###
+# gpt-3
+'gpt-3': gpt_3,
- 'codellama-34b-instruct': codellama_34b_instruct,
- 'codellama-70b-instruct': codellama_70b_instruct,
+# gpt-3.5
+'gpt-3.5-turbo': gpt_35_turbo,
+
+# gpt-4
+'gpt-4o' : gpt_4o,
+'gpt-4o-mini' : gpt_4o_mini,
+'gpt-4' : gpt_4,
+'gpt-4-turbo' : gpt_4_turbo,
+
+
+### Meta ###
+"meta-ai": meta,
+# llama-2
+'llama-2-13b': llama_2_13b,
- ### Mistral (Opensource) ###
- 'mixtral-8x7b': mixtral_8x7b,
- 'mistral-7b-v02': mistral_7b_v02,
+# llama-3
+'llama-3-8b': llama_3_8b,
+'llama-3-70b': llama_3_70b,
+# llama-3.1
+'llama-3.1-8b': llama_3_1_8b,
+'llama-3.1-70b': llama_3_1_70b,
+'llama-3.1-405b': llama_3_1_405b,
+
- ### NousResearch ###
- 'Nous-Hermes-2-Mixtral-8x7B-DPO': Nous_Hermes_2_Mixtral_8x7B_DPO,
+### Mistral ###
+'mistral-7b': mistral_7b,
+'mixtral-8x7b': mixtral_8x7b,
+'mixtral-8x22b': mixtral_8x22b,
+
+
+### NousResearch ###
+'mixtral-8x7b-dpo': mixtral_8x7b_dpo,
+
+'yi-34b': yi_34b,
+
+
+### Microsoft ###
+'phi-3-mini-4k': phi_3_mini_4k,
- ### 01-ai ###
- 'Yi-1.5-34B-Chat': Yi_1_5_34B_Chat,
-
-
- ### Microsoft ###
- 'Phi-3-mini-4k-instruct': Phi_3_mini_4k_instruct,
+### Google ###
+# gemini
+'gemini': gemini,
+'gemini-pro': gemini_pro,
+'gemini-flash': gemini_flash,
+
+# gemma
+'gemma-2b': gemma_2b,
+'gemma-2b-9b': gemma_2b_9b,
+'gemma-2b-27b': gemma_2b_27b,
- ### Google ###
- # gemini
- 'gemini': gemini,
- 'gemini-pro': gemini_pro,
+### Anthropic ###
+'claude-2': claude_2,
+'claude-2.0': claude_2_0,
+'claude-2.1': claude_2_1,
+
+'claude-3-opus': claude_3_opus,
+'claude-3-sonnet': claude_3_sonnet,
+'claude-3-haiku': claude_3_haiku,
+'claude-3-5-sonnet': claude_3_5_sonnet,
+
+
+### Reka AI ###
+'reka-core': reka_core,
+
+
+### Blackbox ###
+'blackbox': blackbox,
+
+
+### CohereForAI ###
+'command-r+': command_r_plus,
- # gemma
- 'gemma-2-9b-it': gemma_2_9b_it,
- 'gemma-2-27b-it': gemma_2_27b_it,
+
+### Databricks ###
+'dbrx-instruct': dbrx_instruct,
- ### Anthropic ###
- 'claude-v2': claude_v2,
- 'claude-3-opus': claude_3_opus,
- 'claude-3-sonnet': claude_3_sonnet,
- 'claude-3-haiku': claude_3_haiku,
+### GigaChat ###
+'gigachat': gigachat,
+
+
+### iFlytek ###
+'sparkdesk-v1.1': sparkdesk_v1_1,
+
+
+### Qwen ###
+'qwen-1.5-14b': qwen_1_5_14b,
+'qwen-1.5-72b': qwen_1_5_72b,
+'qwen-1.5-110b': qwen_1_5_110b,
+'qwen-2-72b': qwen_2_72b,
+'qwen-turbo': qwen_turbo,
+
+
+### Zhipu AI ###
+'glm-3-6b': glm_3_6b,
+'glm-4-9b': glm_4_9b,
+'glm-4': glm_4,
+
+
+### 01-ai ###
+'yi-1.5-9b': yi_1_5_9b,
+
+
+### Upstage ###
+'solar-1-mini': solar_1_mini,
+'solar-10-7b': solar_10_7b,
- ### Reka AI ###
- 'reka': reka_core,
+### Pi ###
+'pi': pi,
- ### NVIDIA ###
- 'nemotron-4-340b-instruct': nemotron_4_340b_instruct,
-
-
- ### Blackbox ###
- 'blackbox': blackbox,
-
-
- ### CohereForAI ###
- 'command-r+': command_r_plus,
-
-
- ### Databricks ###
- 'dbrx-instruct': dbrx_instruct,
+### SambaNova ###
+'samba-coe-v0.1': samba_coe_v0_1,
+
+
+### Trong-Hieu Nguyen-Mau ###
+'v1olet-merged-7b': v1olet_merged_7b,
+
+
+### Macadeliccc ###
+'westlake-7b-v2': westlake_7b_v2,
+
+### DeepSeek ###
+'deepseek': deepseek,
- ### GigaChat ###
- 'gigachat': gigachat,
+
+### Together ###
+'sh-n-7b': sh_n_7b,
+
+
+### Yorickvp ###
+'llava-13b': llava_13b,
- # Other
- 'pi': pi,
+#############
+### Image ###
+#############
+### Stability AI ###
+'sdxl': sdxl,
+'sd-3': sd_3,
- #############
- ### Image ###
- #############
-
- ### Stability AI ###
- 'sdxl': sdxl,
- ### AI Forever ###
- 'kandinsky-2.2': kandinsky_2_2,
+### Playground ###
+'playground-v2.5': playground_v2_5,
+
+
+### Flux AI ###
+'flux': flux,
+'flux-realism': flux_realism,
+'flux-anime': flux_anime,
+'flux-3d': flux_3d,
+'flux-disney': flux_disney,
+'flux-pixel': flux_pixel,
+'flux-schnell': flux_schnell,
+
+
+### ###
+'dalle': dalle,
+'dalle-2': dalle_2,
+'dalle-mini': dalle_mini,
+'emi': emi,
+'any-dark': any_dark,
}
_all_models = list(ModelUtils.convert.keys())
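
ModelUtils.convert is the lookup table that resolves string identifiers to Model instances, and _all_models is derived from its keys. A minimal lookup sketch against the registry defined above:

from g4f.models import ModelUtils, _all_models

model = ModelUtils.convert["gpt-4o-mini"]   # string id -> Model instance
print(model.name, model.base_provider)      # gpt-4o-mini OpenAI
print(len(_all_models), "registered model ids")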