author     Heiner Lohaus <hlohaus@users.noreply.github.com>    2024-04-12 20:58:40 +0200
committer  Heiner Lohaus <hlohaus@users.noreply.github.com>    2024-04-12 20:58:40 +0200
commit     f724c07f1b8cb1b2360d8c6675688b15cbcd85ca (patch)
tree       30a7f0a12c3f959fc5da519356b9d82c6f1520d7
parent     update default gpt-3.5-turbo models (diff)
-rw-r--r--   g4f/Provider/Bing.py                     18
-rw-r--r--   g4f/Provider/BingCreateImages.py          1
-rw-r--r--   g4f/Provider/DeepInfra.py                 1
-rw-r--r--   g4f/Provider/HuggingChat.py               2
-rw-r--r--   g4f/Provider/Local.py                     1
-rw-r--r--   g4f/Provider/needs_auth/Groq.py           1
-rw-r--r--   g4f/Provider/needs_auth/OpenRouter.py     1
-rw-r--r--   g4f/Provider/needs_auth/Openai.py         1
-rw-r--r--   g4f/Provider/needs_auth/OpenaiChat.py     1
-rw-r--r--   g4f/Provider/needs_auth/Theb.py           1
-rw-r--r--   g4f/Provider/needs_auth/ThebApi.py        1
-rw-r--r--   g4f/gui/client/static/js/chat.v1.js       5
-rw-r--r--   g4f/gui/server/api.py                    11
13 files changed, 34 insertions, 11 deletions
diff --git a/g4f/Provider/Bing.py b/g4f/Provider/Bing.py
index 955717a2..cef2f681 100644
--- a/g4f/Provider/Bing.py
+++ b/g4f/Provider/Bing.py
@@ -6,7 +6,7 @@ import uuid
import time
import asyncio
from urllib import parse
-from datetime import datetime
+from datetime import datetime, date
from aiohttp import ClientSession, ClientTimeout, BaseConnector, WSMsgType
from ..typing import AsyncResult, Messages, ImageType, Cookies
@@ -32,6 +32,7 @@ class Bing(AsyncGeneratorProvider, ProviderModelMixin):
"""
Bing provider for generating responses using the Bing API.
"""
+ label = "Microsoft Copilot in Bing"
url = "https://bing.com/chat"
working = True
supports_message_history = True
@@ -47,7 +48,7 @@ class Bing(AsyncGeneratorProvider, ProviderModelMixin):
proxy: str = None,
timeout: int = 900,
api_key: str = None,
- cookies: Cookies = {},
+ cookies: Cookies = None,
connector: BaseConnector = None,
tone: str = None,
image: ImageType = None,
@@ -69,8 +70,6 @@ class Bing(AsyncGeneratorProvider, ProviderModelMixin):
:return: An asynchronous result object.
"""
prompt = messages[-1]["content"]
- if api_key is not None:
- cookies["_U"] = api_key
if context is None:
context = create_context(messages[:-1]) if len(messages) > 1 else None
if tone is None:
@@ -79,7 +78,7 @@ class Bing(AsyncGeneratorProvider, ProviderModelMixin):
gpt4_turbo = True if model.startswith("gpt-4-turbo") else False
return stream_generate(
- prompt, tone, image, context, cookies,
+ prompt, tone, image, context, api_key, cookies,
get_connector(connector, proxy, True),
proxy, web_search, gpt4_turbo, timeout,
**kwargs
@@ -110,11 +109,15 @@ def get_default_cookies():
'SUID' : '',
'SRCHUSR' : '',
'SRCHHPGUSR' : f'HV={int(time.time())}',
+ 'BCP' : 'AD=1&AL=1&SM=1',
+ '_Rwho' : f'u=d&ts={date.today().isoformat()}',
}
-def create_headers(cookies: Cookies = None) -> dict:
+def create_headers(cookies: Cookies = None, api_key: str = None) -> dict:
if cookies is None:
cookies = get_default_cookies()
+ if api_key is not None:
+ cookies["_U"] = api_key
headers = Defaults.headers.copy()
headers["cookie"] = "; ".join(f"{k}={v}" for k, v in cookies.items())
headers["x-forwarded-for"] = get_ip_address()
@@ -364,6 +367,7 @@ async def stream_generate(
image: ImageType = None,
context: str = None,
cookies: dict = None,
+ api_key: str = None,
connector: BaseConnector = None,
proxy: str = None,
web_search: bool = False,
@@ -389,7 +393,7 @@ async def stream_generate(
:param timeout: Timeout for the request.
:return: An asynchronous generator yielding responses.
"""
- headers = create_headers(cookies)
+ headers = create_headers(cookies, api_key)
new_conversation = conversation is None
max_retries = (5 if new_conversation else 0) if max_retries is None else max_retries
async with ClientSession(
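
Note on the Bing.py change above: the "_U" authentication cookie is no longer written into the shared mutable cookies default; instead api_key is threaded through stream_generate into create_headers, which injects it alongside the generated default cookies. A minimal usage sketch of the new call path (the placeholder cookie value is an assumption, not part of this commit):

    from g4f.Provider.Bing import create_headers

    # No cookies passed: get_default_cookies() fills them in, then the Bing
    # "_U" auth cookie is injected from api_key inside create_headers itself.
    headers = create_headers(cookies=None, api_key="<your _U cookie value>")
    assert "_U=<your _U cookie value>" in headers["cookie"]
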
diff --git a/g4f/Provider/BingCreateImages.py b/g4f/Provider/BingCreateImages.py
index 51e06dc5..a7bd54e4 100644
--- a/g4f/Provider/BingCreateImages.py
+++ b/g4f/Provider/BingCreateImages.py
@@ -12,6 +12,7 @@ from .base_provider import AsyncGeneratorProvider, ProviderModelMixin
from .bing.create_images import create_images, create_session, get_cookies_from_browser
class BingCreateImages(AsyncGeneratorProvider, ProviderModelMixin):
+ label = "Microsoft Designer"
url = "https://www.bing.com/images/create"
working = True
diff --git a/g4f/Provider/DeepInfra.py b/g4f/Provider/DeepInfra.py
index fb809b7f..68aaf8b9 100644
--- a/g4f/Provider/DeepInfra.py
+++ b/g4f/Provider/DeepInfra.py
@@ -5,6 +5,7 @@ from ..typing import AsyncResult, Messages
from .needs_auth.Openai import Openai
class DeepInfra(Openai):
+ label = "DeepInfra"
url = "https://deepinfra.com"
working = True
needs_auth = False
diff --git a/g4f/Provider/HuggingChat.py b/g4f/Provider/HuggingChat.py
index 9003c749..b80795fe 100644
--- a/g4f/Provider/HuggingChat.py
+++ b/g4f/Provider/HuggingChat.py
@@ -12,7 +12,7 @@ from .helper import format_prompt, get_connector
class HuggingChat(AsyncGeneratorProvider, ProviderModelMixin):
url = "https://huggingface.co/chat"
working = True
- default_model = "meta-llama/Llama-2-70b-chat-hf"
+ default_model = "mistralai/Mixtral-8x7B-Instruct-v0.1"
models = [
"mistralai/Mixtral-8x7B-Instruct-v0.1",
"google/gemma-7b-it",
diff --git a/g4f/Provider/Local.py b/g4f/Provider/Local.py
index b4d096de..c08064b9 100644
--- a/g4f/Provider/Local.py
+++ b/g4f/Provider/Local.py
@@ -12,6 +12,7 @@ from ..providers.base_provider import AbstractProvider, ProviderModelMixin
from ..errors import MissingRequirementsError
class Local(AbstractProvider, ProviderModelMixin):
+ label = "gpt4all"
working = True
supports_message_history = True
supports_system_message = True
diff --git a/g4f/Provider/needs_auth/Groq.py b/g4f/Provider/needs_auth/Groq.py
index 87e87e60..922b2dd2 100644
--- a/g4f/Provider/needs_auth/Groq.py
+++ b/g4f/Provider/needs_auth/Groq.py
@@ -4,6 +4,7 @@ from .Openai import Openai
from ...typing import AsyncResult, Messages
class Groq(Openai):
+ label = "Groq"
url = "https://console.groq.com/playground"
working = True
default_model = "mixtral-8x7b-32768"
diff --git a/g4f/Provider/needs_auth/OpenRouter.py b/g4f/Provider/needs_auth/OpenRouter.py
index e5f87076..773d9203 100644
--- a/g4f/Provider/needs_auth/OpenRouter.py
+++ b/g4f/Provider/needs_auth/OpenRouter.py
@@ -6,6 +6,7 @@ from .Openai import Openai
from ...typing import AsyncResult, Messages
class OpenRouter(Openai):
+ label = "OpenRouter"
url = "https://openrouter.ai"
working = True
default_model = "openrouter/auto"
diff --git a/g4f/Provider/needs_auth/Openai.py b/g4f/Provider/needs_auth/Openai.py
index ea09e950..81ba5981 100644
--- a/g4f/Provider/needs_auth/Openai.py
+++ b/g4f/Provider/needs_auth/Openai.py
@@ -9,6 +9,7 @@ from ...requests import StreamSession, raise_for_status
from ...errors import MissingAuthError, ResponseError
class Openai(AsyncGeneratorProvider, ProviderModelMixin):
+ label = "OpenAI API"
url = "https://openai.com"
working = True
needs_auth = True
diff --git a/g4f/Provider/needs_auth/OpenaiChat.py b/g4f/Provider/needs_auth/OpenaiChat.py
index 7491725f..3145161a 100644
--- a/g4f/Provider/needs_auth/OpenaiChat.py
+++ b/g4f/Provider/needs_auth/OpenaiChat.py
@@ -35,6 +35,7 @@ from ... import debug
class OpenaiChat(AsyncGeneratorProvider, ProviderModelMixin):
"""A class for creating and managing conversations with OpenAI chat service"""
+ label = "OpenAI ChatGPT"
url = "https://chat.openai.com"
working = True
supports_gpt_35_turbo = True
diff --git a/g4f/Provider/needs_auth/Theb.py b/g4f/Provider/needs_auth/Theb.py
index 2930e35d..af690063 100644
--- a/g4f/Provider/needs_auth/Theb.py
+++ b/g4f/Provider/needs_auth/Theb.py
@@ -32,6 +32,7 @@ models = {
}
class Theb(AbstractProvider):
+ label = "TheB.AI"
url = "https://beta.theb.ai"
working = True
supports_gpt_35_turbo = True
diff --git a/g4f/Provider/needs_auth/ThebApi.py b/g4f/Provider/needs_auth/ThebApi.py
index 48879bcb..22fc62ed 100644
--- a/g4f/Provider/needs_auth/ThebApi.py
+++ b/g4f/Provider/needs_auth/ThebApi.py
@@ -28,6 +28,7 @@ models = {
}
class ThebApi(Openai):
+ label = "TheB.AI API"
url = "https://theb.ai"
working = True
needs_auth = True
diff --git a/g4f/gui/client/static/js/chat.v1.js b/g4f/gui/client/static/js/chat.v1.js
index 46f7e808..8933b442 100644
--- a/g4f/gui/client/static/js/chat.v1.js
+++ b/g4f/gui/client/static/js/chat.v1.js
@@ -1026,9 +1026,10 @@ async function on_api() {
});
providers = await api("providers")
- providers.forEach((provider) => {
+ Object.entries(providers).forEach(([provider, label]) => {
let option = document.createElement("option");
- option.value = option.text = provider;
+ option.value = provider;
+ option.text = label;
providerSelect.appendChild(option);
})
diff --git a/g4f/gui/server/api.py b/g4f/gui/server/api.py
index bbae6066..d8c3aaf8 100644
--- a/g4f/gui/server/api.py
+++ b/g4f/gui/server/api.py
@@ -43,7 +43,16 @@ class Api():
"""
Return a list of all working providers.
"""
- return [provider.__name__ for provider in __providers__ if provider.working]
+ return {
+ provider.__name__: (provider.label
+ if hasattr(provider, "label")
+ else provider.__name__) +
+ (" (WebDriver)"
+ if "webdriver" in provider.get_parameters()
+ else "")
+ for provider in __providers__
+ if provider.working
+ }
def get_version(self):
"""