Diffstat:
 g4f/Provider/Bing.py           |  3 ++
 g4f/Provider/DuckDuckGo.py     | 64 ++++++++++++++++++++++
 g4f/Provider/HuggingFace.py    |  6 +++++-
 g4f/Provider/WhiteRabbitNeo.py | 57 +++++++++++++++++++
 g4f/Provider/__init__.py       |  2 ++
 5 files changed, 131 insertions(+), 1 deletion(-)
diff --git a/g4f/Provider/Bing.py b/g4f/Provider/Bing.py
index aa1b37b0..1e462084 100644
--- a/g4f/Provider/Bing.py
+++ b/g4f/Provider/Bing.py
@@ -46,6 +46,7 @@ class Bing(AsyncGeneratorProvider, ProviderModelMixin):
messages: Messages,
proxy: str = None,
timeout: int = 900,
+ api_key: str = None,
cookies: Cookies = None,
connector: BaseConnector = None,
tone: str = None,
@@ -68,6 +69,8 @@ class Bing(AsyncGeneratorProvider, ProviderModelMixin):
:return: An asynchronous result object.
"""
prompt = messages[-1]["content"]
+ if api_key is not None:
+ cookies["_U"] = api_key
if context is None:
context = create_context(messages[:-1]) if len(messages) > 1 else None
if tone is None:
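A minimal usage sketch for the new api_key path (assuming the provider is importable as g4f.Provider.Bing; the value is the "_U" cookie of a logged-in bing.com session, shown as a placeholder). Because the hunk assigns into the cookies mapping, the sketch passes an explicit dict for it to merge into:

import asyncio
from g4f.Provider import Bing

async def main():
    # api_key is copied into cookies["_U"] by the change above.
    async for chunk in Bing.create_async_generator(
        model="",  # empty selects the provider's default model/tone
        messages=[{"role": "user", "content": "Hello"}],
        cookies={},  # explicit dict so the "_U" assignment has a target
        api_key="<your _U cookie>",  # placeholder, not a real credential
    ):
        print(chunk, end="")

asyncio.run(main())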
diff --git a/g4f/Provider/DuckDuckGo.py b/g4f/Provider/DuckDuckGo.py
new file mode 100644
index 00000000..5269ced3
--- /dev/null
+++ b/g4f/Provider/DuckDuckGo.py
@@ -0,0 +1,64 @@
+from __future__ import annotations
+
+import json
+import aiohttp
+
+from .base_provider import AsyncGeneratorProvider, ProviderModelMixin
+from ..typing import AsyncResult, Messages
+from ..requests.raise_for_status import raise_for_status
+
+class DuckDuckGo(AsyncGeneratorProvider, ProviderModelMixin):
+ url = "https://duckduckgo.com/duckchat"
+ working = True
+ supports_gpt_35_turbo = True
+ supports_message_history = True
+
+ default_model = "gpt-3.5-turbo-0125"
+ models = ["gpt-3.5-turbo-0125", "claude-instant-1.2"]
+ model_aliases = {"gpt-3.5-turbo": "gpt-3.5-turbo-0125"}
+
+ status_url = "https://duckduckgo.com/duckchat/v1/status"
+ chat_url = "https://duckduckgo.com/duckchat/v1/chat"
+ user_agent = 'Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:123.0) Gecko/20100101 Firefox/123.0'
+ headers = {
+ 'User-Agent': user_agent,
+ 'Accept': 'text/event-stream',
+ 'Accept-Language': 'de,en-US;q=0.7,en;q=0.3',
+ 'Accept-Encoding': 'gzip, deflate, br',
+ 'Referer': 'https://duckduckgo.com/',
+ 'Content-Type': 'application/json',
+ 'Origin': 'https://duckduckgo.com',
+ 'Connection': 'keep-alive',
+ 'Cookie': 'dcm=1',
+ 'Sec-Fetch-Dest': 'empty',
+ 'Sec-Fetch-Mode': 'cors',
+ 'Sec-Fetch-Site': 'same-origin',
+ 'Pragma': 'no-cache',
+ 'TE': 'trailers'
+ }
+
+ @classmethod
+ async def create_async_generator(
+ cls,
+ model: str,
+ messages: Messages,
+ **kwargs
+ ) -> AsyncResult:
+ async with aiohttp.ClientSession(headers=cls.headers) as session:
+ async with session.get(cls.status_url, headers={"x-vqd-accept": "1"}) as response:
+ await raise_for_status(response)
+ vqd_4 = response.headers.get("x-vqd-4")
+ payload = {
+ 'model': cls.get_model(model),
+ 'messages': messages
+ }
+ async with session.post(cls.chat_url, json=payload, headers={"x-vqd-4": vqd_4}) as response:
+ await raise_for_status(response)
+ async for line in response.content:
+ if line.startswith(b"data: "):
+ chunk = line[6:]
+ if chunk.startswith(b"[DONE]"):
+ break
+ data = json.loads(chunk)
+ if "message" in data:
+                    yield data["message"]
\ No newline at end of file
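For reference, a hypothetical call into the new provider (the model_aliases table resolves "gpt-3.5-turbo" to "gpt-3.5-turbo-0125", and the x-vqd-4 handshake against status_url happens inside create_async_generator):

import asyncio
from g4f.Provider import DuckDuckGo

async def main():
    async for chunk in DuckDuckGo.create_async_generator(
        model="gpt-3.5-turbo",  # mapped via model_aliases above
        messages=[{"role": "user", "content": "Hi there"}],
    ):
        print(chunk, end="")

asyncio.run(main())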
diff --git a/g4f/Provider/HuggingFace.py b/g4f/Provider/HuggingFace.py
index 647780fd..6a05c26e 100644
--- a/g4f/Provider/HuggingFace.py
+++ b/g4f/Provider/HuggingFace.py
@@ -13,6 +13,10 @@ class HuggingFace(AsyncGeneratorProvider, ProviderModelMixin):
url = "https://huggingface.co/chat"
working = True
supports_message_history = True
+ models = [
+ "mistralai/Mixtral-8x7B-Instruct-v0.1",
+ "mistralai/Mistral-7B-Instruct-v0.2"
+ ]
default_model = "mistralai/Mixtral-8x7B-Instruct-v0.1"

@classmethod
@@ -29,7 +33,7 @@ class HuggingFace(AsyncGeneratorProvider, ProviderModelMixin):
temperature: float = 0.7,
**kwargs
) -> AsyncResult:
- model = cls.get_model(model)
+ model = cls.get_model(model) if not model else model
headers = {}
if api_key is not None:
headers["Authorization"] = f"Bearer {api_key}"
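The get_model change above forwards any non-empty model string untouched, so Hub models outside the hard-coded list can be requested directly. A sketch under that assumption (api_key is optional and, per the hunk, sent as a Bearer header when present):

import asyncio
from g4f.Provider import HuggingFace

async def main():
    async for chunk in HuggingFace.create_async_generator(
        model="mistralai/Mistral-7B-Instruct-v0.2",  # passed through as-is
        messages=[{"role": "user", "content": "Hi"}],
        api_key="<hf token>",  # placeholder; optional Bearer credential
        temperature=0.7,
    ):
        print(chunk, end="")

asyncio.run(main())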
diff --git a/g4f/Provider/WhiteRabbitNeo.py b/g4f/Provider/WhiteRabbitNeo.py
new file mode 100644
index 00000000..339434e6
--- /dev/null
+++ b/g4f/Provider/WhiteRabbitNeo.py
@@ -0,0 +1,57 @@
+from __future__ import annotations
+
+from aiohttp import ClientSession, BaseConnector
+
+from ..typing import AsyncResult, Messages, Cookies
+from ..requests.raise_for_status import raise_for_status
+from .base_provider import AsyncGeneratorProvider
+from .helper import get_cookies, get_connector, get_random_string
+
+class WhiteRabbitNeo(AsyncGeneratorProvider):
+ url = "https://www.whiterabbitneo.com"
+ working = True
+ supports_message_history = True
+ needs_auth = True
+
+ @classmethod
+ async def create_async_generator(
+ cls,
+ model: str,
+ messages: Messages,
+ cookies: Cookies = None,
+ connector: BaseConnector = None,
+ proxy: str = None,
+ **kwargs
+ ) -> AsyncResult:
+ if cookies is None:
+ cookies = get_cookies("www.whiterabbitneo.com")
+ headers = {
+ "User-Agent": "Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:123.0) Gecko/20100101 Firefox/123.0",
+ "Accept": "*/*",
+ "Accept-Language": "de,en-US;q=0.7,en;q=0.3",
+ "Accept-Encoding": "gzip, deflate, br",
+ "Referer": f"{cls.url}/",
+ "Content-Type": "text/plain;charset=UTF-8",
+ "Origin": cls.url,
+ "Connection": "keep-alive",
+ "Sec-Fetch-Dest": "empty",
+ "Sec-Fetch-Mode": "cors",
+ "Sec-Fetch-Site": "same-origin",
+ "TE": "trailers"
+ }
+ async with ClientSession(
+ headers=headers,
+ cookies=cookies,
+ connector=get_connector(connector, proxy)
+ ) as session:
+ data = {
+ "messages": messages,
+ "id": get_random_string(6),
+ "enhancePrompt": False,
+ "useFunctions": False
+ }
+ async with session.post(f"{cls.url}/api/chat", json=data, proxy=proxy) as response:
+ await raise_for_status(response)
+ async for chunk in response.content.iter_any():
+ if chunk:
+                yield chunk.decode(errors="ignore")
\ No newline at end of file
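Since needs_auth is set, the provider expects session cookies; with cookies=None it falls back to get_cookies("www.whiterabbitneo.com"), i.e. a logged-in browser profile. A hypothetical call (the model argument is accepted but never placed in the request payload):

import asyncio
from g4f.Provider import WhiteRabbitNeo

async def main():
    async for chunk in WhiteRabbitNeo.create_async_generator(
        model="",  # unused: the payload carries only messages, id and flags
        messages=[{"role": "user", "content": "Hello"}],
        cookies=None,  # None triggers the get_cookies() browser lookup
    ):
        print(chunk, end="")

asyncio.run(main())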
diff --git a/g4f/Provider/__init__.py b/g4f/Provider/__init__.py
index 1db29e19..b567305c 100644
--- a/g4f/Provider/__init__.py
+++ b/g4f/Provider/__init__.py
@@ -21,6 +21,7 @@ from .ChatgptFree import ChatgptFree
from .ChatgptNext import ChatgptNext
from .ChatgptX import ChatgptX
from .DeepInfra import DeepInfra
+from .DuckDuckGo import DuckDuckGo
from .FlowGpt import FlowGpt
from .FreeChatgpt import FreeChatgpt
from .FreeGpt import FreeGpt
@@ -37,6 +38,7 @@ from .Local import Local
from .PerplexityLabs import PerplexityLabs
from .Pi import Pi
from .Vercel import Vercel
+from .WhiteRabbitNeo import WhiteRabbitNeo
from .You import You

import sys
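With both modules registered here, the new providers are reachable from g4f's top-level API; a sketch assuming the usual ChatCompletion entry point:

import g4f

# provider= pins the request to one of the newly registered classes.
response = g4f.ChatCompletion.create(
    model="gpt-3.5-turbo",
    provider=g4f.Provider.DuckDuckGo,
    messages=[{"role": "user", "content": "Hello"}],
)
print(response)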