path: root/g4f/models.py
Diffstat:
-rw-r--r--  g4f/models.py  250
1 file changed, 201 insertions(+), 49 deletions(-)
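
Before the hunks, a hedged sketch of the Model container that every definition below instantiates. It is reconstructed only from what is visible in this diff (the dataclass import and the name / base_provider / best_provider fields); the real class in g4f/models.py may differ in decorator options or extra fields, and the typing.Any annotation stands in here for the package's ProviderType.

from dataclasses import dataclass
from typing import Any

@dataclass
class Model:
    # Reconstructed shape only; see g4f/models.py for the authoritative definition.
    name: str            # canonical model identifier, e.g. 'gpt-4o-mini'
    base_provider: str   # vendor label, e.g. 'openai', 'Anthropic', 'Google'
    best_provider: Any   # a single provider class or an IterListProvider fallback chain
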
diff --git a/g4f/models.py b/g4f/models.py
index e9016561..0c66fd66 100644
--- a/g4f/models.py
+++ b/g4f/models.py
@@ -5,9 +5,12 @@ from dataclasses import dataclass
from .Provider import IterListProvider, ProviderType
from .Provider import (
AI365VIP,
+ Allyfy,
Bing,
Blackbox,
+ ChatGot,
Chatgpt4o,
+ Chatgpt4Online,
ChatgptFree,
DDG,
DeepInfra,
@@ -84,6 +87,7 @@ gpt_35_long = Model(
DDG,
AI365VIP,
Pizzagpt,
+ Allyfy,
])
)
@@ -107,6 +111,7 @@ gpt_35_turbo = Model(
DDG,
AI365VIP,
Pizzagpt,
+ Allyfy,
])
)
@@ -133,7 +138,7 @@ gpt_4 = Model(
name = 'gpt-4',
base_provider = 'openai',
best_provider = IterListProvider([
- Bing, Liaobots,
+ Bing, Liaobots, Chatgpt4Online
])
)
@@ -165,7 +170,15 @@ gpt_4o = Model(
name = 'gpt-4o',
base_provider = 'openai',
best_provider = IterListProvider([
- You, Liaobots, Chatgpt4o, AI365VIP
+ You, Liaobots, Chatgpt4o, AI365VIP, OpenaiChat
+ ])
+)
+
+gpt_4o_mini = Model(
+ name = 'gpt-4o-mini',
+ base_provider = 'openai',
+ best_provider = IterListProvider([
+ Liaobots, OpenaiChat, You,
])
)
@@ -185,12 +198,6 @@ meta = Model(
best_provider = MetaAI
)
-llama_2_70b_chat = Model(
- name = "meta/llama-2-70b-chat",
- base_provider = "meta",
- best_provider = IterListProvider([ReplicateHome])
-)
-
llama3_8b_instruct = Model(
name = "meta-llama/Meta-Llama-3-8B-Instruct",
base_provider = "meta",
@@ -200,7 +207,19 @@ llama3_8b_instruct = Model(
llama3_70b_instruct = Model(
name = "meta-llama/Meta-Llama-3-70B-Instruct",
base_provider = "meta",
- best_provider = IterListProvider([DeepInfra, PerplexityLabs, Replicate, HuggingChat, DDG])
+ best_provider = IterListProvider([DeepInfra, PerplexityLabs, Replicate, DDG, ReplicateHome])
+)
+
+llama_3_1_70b_Instruct = Model(
+ name = "meta-llama/Meta-Llama-3.1-70B-Instruct",
+ base_provider = "meta",
+ best_provider = IterListProvider([HuggingChat, HuggingFace])
+)
+
+llama_3_1_405b_Instruct_FP8 = Model(
+ name = "meta-llama/Meta-Llama-3.1-405B-Instruct-FP8",
+ base_provider = "meta",
+ best_provider = IterListProvider([HuggingChat, HuggingFace])
)
codellama_34b_instruct = Model(
@@ -220,13 +239,13 @@ codellama_70b_instruct = Model(
mixtral_8x7b = Model(
name = "mistralai/Mixtral-8x7B-Instruct-v0.1",
base_provider = "huggingface",
- best_provider = IterListProvider([DeepInfra, HuggingFace, PerplexityLabs, HuggingChat, DDG])
+ best_provider = IterListProvider([DeepInfra, HuggingFace, PerplexityLabs, HuggingChat, DDG, ReplicateHome])
)
mistral_7b_v02 = Model(
name = "mistralai/Mistral-7B-Instruct-v0.2",
base_provider = "huggingface",
- best_provider = IterListProvider([DeepInfra, HuggingFace, HuggingChat, ReplicateHome])
+ best_provider = IterListProvider([DeepInfra, HuggingFace, HuggingChat])
)
@@ -265,10 +284,22 @@ gemini = Model(
gemini_pro = Model(
name = 'gemini-pro',
base_provider = 'Google',
- best_provider = IterListProvider([GeminiPro, You, GeminiProChat])
+ best_provider = IterListProvider([GeminiPro, You, ChatGot, GeminiProChat, Liaobots])
+)
+
+gemini_flash = Model(
+ name = 'gemini-flash',
+ base_provider = 'Google',
+ best_provider = IterListProvider([Liaobots])
)
# gemma
+gemma_2b_it = Model(
+ name = 'gemma-2b-it',
+ base_provider = 'Google',
+ best_provider = IterListProvider([ReplicateHome])
+)
+
gemma_2_9b_it = Model(
name = 'gemma-2-9b-it',
base_provider = 'Google',
@@ -283,28 +314,46 @@ gemma_2_27b_it = Model(
### Anthropic ###
-claude_v2 = Model(
- name = 'claude-v2',
- base_provider = 'anthropic',
- best_provider = IterListProvider([Vercel])
+claude_2 = Model(
+ name = 'claude-2',
+ base_provider = 'Anthropic',
+ best_provider = IterListProvider([You])
+)
+
+claude_2_0 = Model(
+ name = 'claude-2.0',
+ base_provider = 'Anthropic',
+ best_provider = IterListProvider([Liaobots])
+)
+
+claude_2_1 = Model(
+ name = 'claude-2.1',
+ base_provider = 'Anthropic',
+ best_provider = IterListProvider([Liaobots])
)
claude_3_opus = Model(
name = 'claude-3-opus',
- base_provider = 'anthropic',
- best_provider = You
+ base_provider = 'Anthropic',
+ best_provider = IterListProvider([You, Liaobots])
)
claude_3_sonnet = Model(
name = 'claude-3-sonnet',
- base_provider = 'anthropic',
- best_provider = You
+ base_provider = 'Anthropic',
+ best_provider = IterListProvider([You, Liaobots])
+)
+
+claude_3_5_sonnet = Model(
+ name = 'claude-3-5-sonnet',
+ base_provider = 'Anthropic',
+ best_provider = IterListProvider([Liaobots])
)
claude_3_haiku = Model(
name = 'claude-3-haiku',
- base_provider = 'anthropic',
- best_provider = IterListProvider([DDG, AI365VIP])
+ base_provider = 'Anthropic',
+ best_provider = IterListProvider([DDG, AI365VIP, Liaobots])
)
@@ -348,6 +397,58 @@ command_r_plus = Model(
)
+### iFlytek ###
+SparkDesk_v1_1 = Model(
+ name = 'SparkDesk-v1.1',
+ base_provider = 'iFlytek',
+ best_provider = IterListProvider([FreeChatgpt])
+)
+
+
+### DeepSeek ###
+deepseek_coder = Model(
+ name = 'deepseek-coder',
+ base_provider = 'DeepSeek',
+ best_provider = IterListProvider([FreeChatgpt])
+)
+
+deepseek_chat = Model(
+ name = 'deepseek-chat',
+ base_provider = 'DeepSeek',
+ best_provider = IterListProvider([FreeChatgpt])
+)
+
+
+### Qwen ###
+Qwen2_7B_Instruct = Model(
+ name = 'Qwen2-7B-Instruct',
+ base_provider = 'Qwen',
+ best_provider = IterListProvider([FreeChatgpt])
+)
+
+
+### Zhipu AI ###
+glm4_9B_chat = Model(
+ name = 'glm4-9B-chat',
+ base_provider = 'Zhipu AI',
+ best_provider = IterListProvider([FreeChatgpt])
+)
+
+chatglm3_6B = Model(
+ name = 'chatglm3-6B',
+ base_provider = 'Zhipu AI',
+ best_provider = IterListProvider([FreeChatgpt])
+)
+
+
+### 01-ai ###
+Yi_1_5_9B_Chat = Model(
+ name = 'Yi-1.5-9B-Chat',
+ base_provider = '01-ai',
+ best_provider = IterListProvider([FreeChatgpt])
+)
+
+
### Other ###
pi = Model(
name = 'pi',
@@ -364,14 +465,27 @@ pi = Model(
sdxl = Model(
name = 'stability-ai/sdxl',
base_provider = 'Stability AI',
- best_provider = IterListProvider([ReplicateHome, DeepInfraImage])
+ best_provider = IterListProvider([DeepInfraImage])
+)
+
+stable_diffusion_3 = Model(
+ name = 'stability-ai/stable-diffusion-3',
+ base_provider = 'Stability AI',
+ best_provider = IterListProvider([ReplicateHome])
+)
+
+sdxl_lightning_4step = Model(
+ name = 'bytedance/sdxl-lightning-4step',
+ base_provider = 'ByteDance',
+ best_provider = IterListProvider([ReplicateHome])
)
-### AI Forever ###
-kandinsky_2_2 = Model(
- name = 'ai-forever/kandinsky-2.2',
- base_provider = 'AI Forever',
+playground_v2_5_1024px_aesthetic = Model(
+ name = 'playgroundai/playground-v2.5-1024px-aesthetic',
+ base_provider = 'Playground',
best_provider = IterListProvider([ReplicateHome])
)
@@ -385,12 +499,12 @@ class ModelUtils:
"""
convert: dict[str, Model] = {
- ############
- ### Text ###
- ############
+ ############
+ ### Text ###
+ ############
- ### OpenAI ###
- ### GPT-3.5 / GPT-4 ###
+ ### OpenAI ###
+ ### GPT-3.5 / GPT-4 ###
# gpt-3.5
'gpt-3.5-turbo' : gpt_35_turbo,
'gpt-3.5-turbo-0613' : gpt_35_turbo_0613,
@@ -400,6 +514,7 @@ class ModelUtils:
# gpt-4
'gpt-4o' : gpt_4o,
+ 'gpt-4o-mini' : gpt_4o_mini,
'gpt-4' : gpt_4,
'gpt-4-0613' : gpt_4_0613,
'gpt-4-32k' : gpt_4_32k,
@@ -407,14 +522,16 @@ class ModelUtils:
'gpt-4-turbo' : gpt_4_turbo,
- ### Meta ###
+ ### Meta ###
"meta-ai": meta,
- 'llama-2-70b-chat': llama_2_70b_chat,
'llama3-8b': llama3_8b_instruct, # alias
'llama3-70b': llama3_70b_instruct, # alias
'llama3-8b-instruct' : llama3_8b_instruct,
'llama3-70b-instruct': llama3_70b_instruct,
+ 'llama-3.1-70b-Instruct': llama_3_1_70b_Instruct,
+ 'llama-3.1-405B-Instruct-FP8': llama_3_1_405b_Instruct_FP8,
+
'codellama-34b-instruct': codellama_34b_instruct,
'codellama-70b-instruct': codellama_70b_instruct,
@@ -426,31 +543,36 @@ class ModelUtils:
### NousResearch ###
- 'Nous-Hermes-2-Mixtral-8x7B-DPO': Nous_Hermes_2_Mixtral_8x7B_DPO,
+ 'Nous-Hermes-2-Mixtral-8x7B-DPO': Nous_Hermes_2_Mixtral_8x7B_DPO,
- ### 01-ai ###
- 'Yi-1.5-34B-Chat': Yi_1_5_34B_Chat,
+ ### 01-ai ###
+ 'Yi-1.5-34B-Chat': Yi_1_5_34B_Chat,
- ### Microsoft ###
- 'Phi-3-mini-4k-instruct': Phi_3_mini_4k_instruct,
+ ### Microsoft ###
+ 'Phi-3-mini-4k-instruct': Phi_3_mini_4k_instruct,
### Google ###
# gemini
'gemini': gemini,
'gemini-pro': gemini_pro,
+ 'gemini-flash': gemini_flash,
# gemma
+ 'gemma-2b-it': gemma_2b_it,
'gemma-2-9b-it': gemma_2_9b_it,
'gemma-2-27b-it': gemma_2_27b_it,
### Anthropic ###
- 'claude-v2': claude_v2,
+ 'claude-2': claude_2,
+ 'claude-2.0': claude_2_0,
+ 'claude-2.1': claude_2_1,
'claude-3-opus': claude_3_opus,
'claude-3-sonnet': claude_3_sonnet,
+ 'claude-3-5-sonnet': claude_3_5_sonnet,
'claude-3-haiku': claude_3_haiku,
@@ -462,11 +584,11 @@ class ModelUtils:
'nemotron-4-340b-instruct': nemotron_4_340b_instruct,
- ### Blackbox ###
- 'blackbox': blackbox,
+ ### Blackbox ###
+ 'blackbox': blackbox,
- ### CohereForAI ###
+ ### CohereForAI ###
'command-r+': command_r_plus,
@@ -474,24 +596,54 @@ class ModelUtils:
'dbrx-instruct': dbrx_instruct,
- ### GigaChat ###
+ ### GigaChat ###
'gigachat': gigachat,
+ ### iFlytek ###
+ 'SparkDesk-v1.1': SparkDesk_v1_1,
+
+
+ ### DeepSeek ###
+ 'deepseek-coder': deepseek_coder,
+ 'deepseek-chat': deepseek_chat,
+
+
+ ### Qwen ###
+ 'Qwen2-7B-Instruct': Qwen2_7B_Instruct,
+
+
+ ### Zhipu AI ###
+ 'glm4-9B-chat': glm4_9B_chat,
+ 'chatglm3-6B': chatglm3_6B,
+
+
+ ### 01-ai ###
+ 'Yi-1.5-9B-Chat': Yi_1_5_9B_Chat,
+
+
# Other
'pi': pi,
#############
- ### Image ###
- #############
+ ### Image ###
+ #############
- ### Stability AI ###
+ ### Stability AI ###
'sdxl': sdxl,
+ 'stable-diffusion-3': stable_diffusion_3,
+
- ### AI Forever ###
- 'kandinsky-2.2': kandinsky_2_2,
+ ### ByteDance ###
+ 'sdxl-lightning-4step': sdxl_lightning_4step,
+
+ ### Playground ###
+ 'playground-v2.5-1024px-aesthetic': playground_v2_5_1024px_aesthetic,
+
}
_all_models = list(ModelUtils.convert.keys())
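
As a closing illustration, a minimal usage sketch (not part of the commit) of how the aliases registered above are typically consumed. It assumes the g4f package from this repository is installed and that ChatCompletion.create, the package's existing entry point, is unchanged by this diff; the prompt text is made up.

import g4f
from g4f.models import ModelUtils

# Resolve one of the newly registered aliases to its Model definition.
model = ModelUtils.convert['gpt-4o-mini']
print(model.name, model.base_provider)   # gpt-4o-mini openai

# Pass the model name to the chat API; the providers wrapped in
# IterListProvider are tried until one of them returns a response.
response = g4f.ChatCompletion.create(
    model=model.name,
    messages=[{"role": "user", "content": "Hello"}],
)
print(response)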