Diffstat (limited to 'g4f')
-rw-r--r--  g4f/Provider/Bing.py      | 21
-rw-r--r--  g4f/Provider/Liaobots.py  |  1
-rw-r--r--  g4f/Provider/Wuguokai.py  |  6
-rw-r--r--  g4f/Provider/__init__.py  |  2
4 files changed, 6 insertions, 24 deletions
diff --git a/g4f/Provider/Bing.py b/g4f/Provider/Bing.py
index c31ba5a7..48968e39 100644
--- a/g4f/Provider/Bing.py
+++ b/g4f/Provider/Bing.py
@@ -26,32 +26,15 @@ class Bing(AsyncGeneratorProvider):
prompt = messages[-1]["content"]
context = create_context(messages[:-1])
- if cookies:
+ if cookies and "SRCHD" in cookies:
#TODO: Will implement proper cookie retrieval later and use a try-except mechanism in 'stream_generate' instead of defaulting the cookie value like this
cookies_dict = {
- 'MUID' : '',
- 'BCP' : '',
- 'MUIDB' : '',
- 'USRLOC' : '',
- 'SRCHD' : 'AF=hpcodx',
- 'MMCASM' : '',
- '_UR' : '',
- 'ANON' : '',
- 'NAP' : '',
- 'ABDEF' : '',
+ 'SRCHD' : cookies["SRCHD"],
'PPLState' : '1',
'KievRPSSecAuth': '',
- '_U' : '',
'SUID' : '',
- '_EDGE_S' : '',
- 'WLS' : '',
- '_HPVN' : '',
- '_SS' : '',
- '_clck' : '',
'SRCHUSR' : '',
- '_RwBf' : '',
'SRCHHPGUSR' : '',
- 'ipv6' : '',
}
return stream_generate(prompt, context, cookies_dict)
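
Note on the Bing.py change above: instead of hard-coding every Bing cookie to an empty string, only a caller-supplied SRCHD value is forwarded and the few remaining keys keep placeholder defaults. A minimal sketch of that filtering step follows; browser_cookies is a hypothetical cookie dump invented for illustration, and the SRCHD value shown is just the old hard-coded default from the removed lines.

# Sketch of the new cookie filtering used in Bing.create_async_generator.
# `browser_cookies` is a hypothetical example, not part of g4f.
browser_cookies = {"SRCHD": "AF=hpcodx", "MUID": "ignored", "_U": "ignored"}

if browser_cookies and "SRCHD" in browser_cookies:
    cookies_dict = {
        "SRCHD":          browser_cookies["SRCHD"],  # only key taken from the caller
        "PPLState":       "1",
        "KievRPSSecAuth": "",
        "SUID":           "",
        "SRCHUSR":        "",
        "SRCHHPGUSR":     "",
    }
    print(cookies_dict)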
diff --git a/g4f/Provider/Liaobots.py b/g4f/Provider/Liaobots.py
index e69a565e..95a0ea06 100644
--- a/g4f/Provider/Liaobots.py
+++ b/g4f/Provider/Liaobots.py
@@ -28,6 +28,7 @@ models = {
class Liaobots(AsyncGeneratorProvider):
url = "https://liaobots.com"
+ working = True
supports_stream = True
supports_gpt_35_turbo = True
supports_gpt_4 = True
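
The Liaobots.py change only marks the provider as operational via its working flag. A small sketch of how such a flag can be used to enumerate usable providers; the filtering loop itself is illustrative, but g4f.Provider.__all__ and the per-class working attribute both appear in this diff.

# Illustrative only: collect providers that declare working = True.
from g4f import Provider

working_providers = [
    name for name in Provider.__all__
    if getattr(getattr(Provider, name, None), "working", False)
]
print(working_providers)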
diff --git a/g4f/Provider/Wuguokai.py b/g4f/Provider/Wuguokai.py
index 906283ad..8a5aac0d 100644
--- a/g4f/Provider/Wuguokai.py
+++ b/g4f/Provider/Wuguokai.py
@@ -1,4 +1,4 @@
-import random, requests, json
+import random, requests
from ..typing import Any, CreateResult
from .base_provider import BaseProvider
@@ -6,8 +6,6 @@ from .base_provider import BaseProvider
class Wuguokai(BaseProvider):
url = 'https://chat.wuguokai.xyz'
supports_gpt_35_turbo = True
- supports_stream = False
- needs_auth = False
working = True
@staticmethod
@@ -43,7 +41,7 @@ class Wuguokai(BaseProvider):
"userId": f"#/chat/{random.randint(1,99999999)}",
"usingContext": True
}
- response = requests.post("https://ai-api20.wuguokai.xyz/api/chat-process", headers=headers, data=json.dumps(data),proxies=kwargs['proxy'] if 'proxy' in kwargs else {})
+ response = requests.post("https://ai-api20.wuguokai.xyz/api/chat-process", headers=headers, timeout=3, json=data, proxies=kwargs['proxy'] if 'proxy' in kwargs else {})
_split = response.text.split("> 若回答失败请重试或多刷新几次界面后重试")
if response.status_code == 200:
if len(_split) > 1:
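
The Wuguokai.py request change replaces the manual data=json.dumps(data) body with requests' json= parameter, which serializes the payload and sets the Content-Type header itself (hence the dropped json import), and adds a 3-second timeout. A stripped-down sketch of the same call pattern, with headers omitted and the proxy passed the way the provider does it; the kwargs dict here is a stand-in for the provider's **kwargs.

# Sketch of the updated request style; kwargs and payload values are illustrative.
import random
import requests

kwargs = {}  # stand-in for the provider's **kwargs
data = {
    "prompt": "hello",
    "userId": f"#/chat/{random.randint(1, 99999999)}",
    "usingContext": True,
}
response = requests.post(
    "https://ai-api20.wuguokai.xyz/api/chat-process",
    json=data,   # replaces data=json.dumps(data); also sets Content-Type
    timeout=3,   # fail fast instead of hanging on a dead endpoint
    proxies=kwargs["proxy"] if "proxy" in kwargs else {},
)
print(response.status_code)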
diff --git a/g4f/Provider/__init__.py b/g4f/Provider/__init__.py
index 06643665..fce66d0c 100644
--- a/g4f/Provider/__init__.py
+++ b/g4f/Provider/__init__.py
@@ -30,7 +30,7 @@ from .FastGpt import FastGpt
from .V50 import V50
from .Wuguokai import Wuguokai
-from .base_provider import BaseProvider
+from .base_provider import BaseProvider, AsyncProvider, AsyncGeneratorProvider
__all__ = [
'BaseProvider',