author     H Lohaus <hlohaus@users.noreply.github.com>    2024-01-24 01:05:29 +0100
committer  GitHub <noreply@github.com>                    2024-01-24 01:05:29 +0100
commit     71d71b6512df12d6295c1f8323eb056edd89c57f (patch)
tree       db88f2c8237b51519401e077df059b71f41e6404 /g4f
parent     Merge pull request #1509 from hlohaus/sort (diff)
parent     Add get_connector helper (diff)
download   gpt4free-0.2.0.5.tar (also .tar.gz, .tar.bz2, .tar.lz, .tar.xz, .tar.zst, .zip)
Diffstat (limited to 'g4f')
-rw-r--r--   g4f/Provider/Bing.py                 22
-rw-r--r--   g4f/Provider/HuggingChat.py           8
-rw-r--r--   g4f/Provider/Liaobots.py              7
-rw-r--r--   g4f/Provider/PerplexityLabs.py        6
-rw-r--r--   g4f/Provider/bing/create_images.py   11
-rw-r--r--   g4f/Provider/helper.py               15
-rw-r--r--   g4f/errors.py                         3
-rw-r--r--   g4f/typing.py                         4
8 files changed, 50 insertions, 26 deletions
diff --git a/g4f/Provider/Bing.py b/g4f/Provider/Bing.py
index b869a6ef..11bb1414 100644
--- a/g4f/Provider/Bing.py
+++ b/g4f/Provider/Bing.py
@@ -6,11 +6,12 @@ import os
import uuid
import time
from urllib import parse
-from aiohttp import ClientSession, ClientTimeout
+from aiohttp import ClientSession, ClientTimeout, BaseConnector
from ..typing import AsyncResult, Messages, ImageType
from ..image import ImageResponse
from .base_provider import AsyncGeneratorProvider
+from .helper import get_connector
from .bing.upload_image import upload_image
from .bing.create_images import create_images
from .bing.conversation import Conversation, create_conversation, delete_conversation
@@ -39,6 +40,7 @@ class Bing(AsyncGeneratorProvider):
proxy: str = None,
timeout: int = 900,
cookies: dict = None,
+ connector: BaseConnector = None,
tone: str = Tones.balanced,
image: ImageType = None,
web_search: bool = False,
@@ -68,7 +70,7 @@ class Bing(AsyncGeneratorProvider):
gpt4_turbo = True if model.startswith("gpt-4-turbo") else False
- return stream_generate(prompt, tone, image, context, proxy, cookies, web_search, gpt4_turbo, timeout)
+ return stream_generate(prompt, tone, image, context, cookies, get_connector(connector, proxy), web_search, gpt4_turbo, timeout)
def create_context(messages: Messages) -> str:
"""
@@ -253,8 +255,8 @@ async def stream_generate(
tone: str,
image: ImageType = None,
context: str = None,
- proxy: str = None,
cookies: dict = None,
+ connector: BaseConnector = None,
web_search: bool = False,
gpt4_turbo: bool = False,
timeout: int = 900
@@ -266,7 +268,6 @@ async def stream_generate(
:param tone: The desired tone for the response.
:param image: The image type involved in the response.
:param context: Additional context for the prompt.
- :param proxy: Proxy settings for the request.
:param cookies: Cookies for the session.
:param web_search: Flag to enable web search.
:param gpt4_turbo: Flag to enable GPT-4 Turbo.
@@ -278,10 +279,10 @@ async def stream_generate(
headers["Cookie"] = "; ".join(f"{k}={v}" for k, v in cookies.items())
async with ClientSession(
- timeout=ClientTimeout(total=timeout), headers=headers
+ timeout=ClientTimeout(total=timeout), headers=headers, connector=connector
) as session:
- conversation = await create_conversation(session, proxy)
- image_response = await upload_image(session, image, tone, proxy) if image else None
+ conversation = await create_conversation(session)
+ image_response = await upload_image(session, image, tone) if image else None
if image_response:
yield image_response
@@ -289,8 +290,7 @@ async def stream_generate(
async with session.ws_connect(
'wss://sydney.bing.com/sydney/ChatHub',
autoping=False,
- params={'sec_access_token': conversation.conversationSignature},
- proxy=proxy
+ params={'sec_access_token': conversation.conversationSignature}
) as wss:
await wss.send_str(format_message({'protocol': 'json', 'version': 1}))
await wss.receive(timeout=timeout)
@@ -322,7 +322,7 @@ async def stream_generate(
elif message.get('contentType') == "IMAGE":
prompt = message.get('text')
try:
- image_response = ImageResponse(await create_images(session, prompt, proxy), prompt)
+ image_response = ImageResponse(await create_images(session, prompt), prompt)
except:
response_txt += f"\nhttps://www.bing.com/images/create?q={parse.quote(prompt)}"
final = True
@@ -342,4 +342,4 @@ async def stream_generate(
raise Exception(f"{result['value']}: {result['message']}")
return
finally:
- await delete_conversation(session, conversation, proxy)
+ await delete_conversation(session, conversation)
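
With this change, callers who manage their own aiohttp connector can hand it to the Bing provider instead of a proxy string. A minimal sketch, assuming the optional aiohttp_socks package is installed and that extra keyword arguments passed to the high-level API reach Bing's create_async_generator (as they do for proxy elsewhere in g4f); the proxy address is a placeholder:

import asyncio
from aiohttp_socks import ProxyConnector
import g4f
from g4f.Provider import Bing

async def main():
    connector = ProxyConnector.from_url("socks5://127.0.0.1:1080")  # placeholder address
    response = await g4f.ChatCompletion.create_async(
        model="gpt-4",
        messages=[{"role": "user", "content": "Hello"}],
        provider=Bing,
        connector=connector,  # forwarded to the provider as a keyword argument
    )
    print(response)

asyncio.run(main())
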
diff --git a/g4f/Provider/HuggingChat.py b/g4f/Provider/HuggingChat.py
index d493da8f..79e4ae38 100644
--- a/g4f/Provider/HuggingChat.py
+++ b/g4f/Provider/HuggingChat.py
@@ -2,11 +2,11 @@ from __future__ import annotations
import json, uuid
-from aiohttp import ClientSession
+from aiohttp import ClientSession, BaseConnector
from ..typing import AsyncResult, Messages
from .base_provider import AsyncGeneratorProvider, ProviderModelMixin
-from .helper import format_prompt, get_cookies
+from .helper import format_prompt, get_cookies, get_connector
class HuggingChat(AsyncGeneratorProvider, ProviderModelMixin):
@@ -33,6 +33,7 @@ class HuggingChat(AsyncGeneratorProvider, ProviderModelMixin):
messages: Messages,
stream: bool = True,
proxy: str = None,
+ connector: BaseConnector = None,
web_search: bool = False,
cookies: dict = None,
**kwargs
@@ -45,7 +46,8 @@ class HuggingChat(AsyncGeneratorProvider, ProviderModelMixin):
}
async with ClientSession(
cookies=cookies,
- headers=headers
+ headers=headers,
+ connector=get_connector(connector, proxy)
) as session:
async with session.post(f"{cls.url}/conversation", json={"model": cls.get_model(model)}, proxy=proxy) as response:
conversation_id = (await response.json())["conversationId"]
diff --git a/g4f/Provider/Liaobots.py b/g4f/Provider/Liaobots.py
index 5151c115..e93642ba 100644
--- a/g4f/Provider/Liaobots.py
+++ b/g4f/Provider/Liaobots.py
@@ -2,10 +2,11 @@ from __future__ import annotations
import uuid
-from aiohttp import ClientSession
+from aiohttp import ClientSession, BaseConnector
from ..typing import AsyncResult, Messages
from .base_provider import AsyncGeneratorProvider, ProviderModelMixin
+from .helper import get_connector
models = {
"gpt-4": {
@@ -91,6 +92,7 @@ class Liaobots(AsyncGeneratorProvider, ProviderModelMixin):
messages: Messages,
auth: str = None,
proxy: str = None,
+ connector: BaseConnector = None,
**kwargs
) -> AsyncResult:
headers = {
@@ -102,7 +104,8 @@ class Liaobots(AsyncGeneratorProvider, ProviderModelMixin):
}
async with ClientSession(
headers=headers,
- cookie_jar=cls._cookie_jar
+ cookie_jar=cls._cookie_jar,
+ connector=get_connector(connector, proxy)
) as session:
cls._auth_code = auth if isinstance(auth, str) else cls._auth_code
if not cls._auth_code:
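
Liaobots follows the same pattern as HuggingChat above: the existing proxy argument now also feeds get_connector, so a plain proxy URL is enough when no explicit connector is supplied, provided the optional aiohttp_socks package is installed. A hedged sketch via the high-level API (the proxy address is a placeholder, and "gpt-4" is taken from the provider's model table above):

import asyncio
import g4f
from g4f.Provider import Liaobots

async def main():
    response = await g4f.ChatCompletion.create_async(
        model="gpt-4",
        messages=[{"role": "user", "content": "Hello"}],
        provider=Liaobots,
        proxy="socks5://127.0.0.1:1080",  # provider builds the connector via get_connector(None, proxy)
    )
    print(response)

asyncio.run(main())
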
diff --git a/g4f/Provider/PerplexityLabs.py b/g4f/Provider/PerplexityLabs.py
index 90258da5..5002b39f 100644
--- a/g4f/Provider/PerplexityLabs.py
+++ b/g4f/Provider/PerplexityLabs.py
@@ -2,10 +2,11 @@ from __future__ import annotations
import random
import json
-from aiohttp import ClientSession
+from aiohttp import ClientSession, BaseConnector
from ..typing import AsyncResult, Messages
from .base_provider import AsyncGeneratorProvider, ProviderModelMixin
+from .helper import get_connector
API_URL = "https://labs-api.perplexity.ai/socket.io/"
WS_URL = "wss://labs-api.perplexity.ai/socket.io/"
@@ -32,6 +33,7 @@ class PerplexityLabs(AsyncGeneratorProvider, ProviderModelMixin):
model: str,
messages: Messages,
proxy: str = None,
+ connector: BaseConnector = None,
**kwargs
) -> AsyncResult:
headers = {
@@ -47,7 +49,7 @@ class PerplexityLabs(AsyncGeneratorProvider, ProviderModelMixin):
"Sec-Fetch-Site": "same-site",
"TE": "trailers",
}
- async with ClientSession(headers=headers) as session:
+ async with ClientSession(headers=headers, connector=get_connector(connector, proxy)) as session:
t = format(random.getrandbits(32), '08x')
async with session.get(
f"{API_URL}?EIO=4&transport=polling&t={t}",
diff --git a/g4f/Provider/bing/create_images.py b/g4f/Provider/bing/create_images.py
index 288a3d90..af39ef1e 100644
--- a/g4f/Provider/bing/create_images.py
+++ b/g4f/Provider/bing/create_images.py
@@ -7,13 +7,13 @@ import asyncio
import time
import json
import os
-from aiohttp import ClientSession
+from aiohttp import ClientSession, BaseConnector
from bs4 import BeautifulSoup
from urllib.parse import quote
from typing import Generator, List, Dict
from ..create_images import CreateImagesProvider
-from ..helper import get_cookies
+from ..helper import get_cookies, get_connector
from ...webdriver import WebDriver, get_driver_cookies, get_browser
from ...base_provider import ProviderType
from ...image import ImageResponse
@@ -50,7 +50,7 @@ def wait_for_login(driver: WebDriver, timeout: int = TIMEOUT_LOGIN) -> None:
raise RuntimeError("Timeout error")
time.sleep(0.5)
-def create_session(cookies: Dict[str, str]) -> ClientSession:
+def create_session(cookies: Dict[str, str], proxy: str = None, connector: BaseConnector = None) -> ClientSession:
"""
Creates a new client session with specified cookies and headers.
@@ -79,7 +79,7 @@ def create_session(cookies: Dict[str, str]) -> ClientSession:
}
if cookies:
headers["Cookie"] = "; ".join(f"{k}={v}" for k, v in cookies.items())
- return ClientSession(headers=headers)
+ return ClientSession(headers=headers, connector=get_connector(connector, proxy))
async def create_images(session: ClientSession, prompt: str, proxy: str = None, timeout: int = TIMEOUT_IMAGE_CREATION) -> List[str]:
"""
@@ -214,7 +214,8 @@ class CreateImagesBing:
cookies = self.cookies or get_cookies(".bing.com")
if "_U" not in cookies:
raise RuntimeError('"_U" cookie is missing')
- async with create_session(cookies) as session:
+ proxy = os.environ.get("G4F_PROXY")
+ async with create_session(cookies, proxy) as session:
images = await create_images(session, prompt, self.proxy)
return ImageResponse(images, prompt)
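
With the hunk above, Bing image creation picks its proxy up from the G4F_PROXY environment variable rather than a dedicated argument. A minimal sketch, with a placeholder address:

import os

# Placeholder address; in practice G4F_PROXY is exported in the shell before launching.
os.environ.setdefault("G4F_PROXY", "socks5://127.0.0.1:1080")

# From here on, the Bing image-creation code reads os.environ.get("G4F_PROXY") and passes
# it to create_session(), so the image session is routed through the proxy without any
# extra argument in user code.
print("image sessions will use proxy:", os.environ.get("G4F_PROXY"))
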
diff --git a/g4f/Provider/helper.py b/g4f/Provider/helper.py
index fce1ee6f..cf204e39 100644
--- a/g4f/Provider/helper.py
+++ b/g4f/Provider/helper.py
@@ -6,13 +6,15 @@ import random
import secrets
import string
from asyncio import AbstractEventLoop, BaseEventLoop
+from aiohttp import BaseConnector
from platformdirs import user_config_dir
from browser_cookie3 import (
chrome, chromium, opera, opera_gx,
brave, edge, vivaldi, firefox,
_LinuxPasswordManager, BrowserCookieError
)
-from ..typing import Dict, Messages
+from ..typing import Dict, Messages, Optional
+from ..errors import AiohttpSocksError
from .. import debug
# Global variable to store cookies
@@ -147,4 +149,13 @@ def get_random_hex() -> str:
Returns:
str: A random hexadecimal string of 32 characters (16 bytes).
"""
- return secrets.token_hex(16).zfill(32)
\ No newline at end of file
+ return secrets.token_hex(16).zfill(32)
+
+def get_connector(connector: BaseConnector = None, proxy: str = None) -> Optional[BaseConnector]:
+ if proxy and not connector:
+ try:
+ from aiohttp_socks import ProxyConnector
+ connector = ProxyConnector.from_url(proxy)
+ except ImportError:
+ raise AiohttpSocksError('Install "aiohttp_socks" package for proxy support')
+ return connector
\ No newline at end of file
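
A short behavior sketch for the new helper, assuming the optional aiohttp_socks package is installed; the proxy addresses are placeholders:

import asyncio
from aiohttp import ClientSession
from g4f.Provider.helper import get_connector

async def main():
    # No connector and no proxy: None is returned, so aiohttp falls back to its default connector.
    assert get_connector(None, None) is None
    # Only a proxy URL: an aiohttp_socks ProxyConnector is built for it.
    connector = get_connector(None, "socks5://127.0.0.1:1080")
    # An explicitly supplied connector is returned unchanged, even if a proxy is also given.
    assert get_connector(connector, "socks5://10.0.0.1:9050") is connector
    # Hand it to a session; the session owns the connector and closes it on exit.
    async with ClientSession(connector=connector) as session:
        pass

asyncio.run(main())
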
diff --git a/g4f/errors.py b/g4f/errors.py
index ddfe74db..aaaa6b4b 100644
--- a/g4f/errors.py
+++ b/g4f/errors.py
@@ -29,4 +29,7 @@ class NestAsyncioError(Exception):
pass
class ModelNotSupportedError(Exception):
+ pass
+
+class AiohttpSocksError(Exception):
pass
\ No newline at end of file
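
The new AiohttpSocksError gives callers something specific to catch when a proxy is requested but aiohttp_socks is not installed. A minimal sketch (the proxy address is a placeholder):

import asyncio
from aiohttp import ClientSession
from g4f.errors import AiohttpSocksError
from g4f.Provider.helper import get_connector

async def main():
    try:
        connector = get_connector(None, "socks5://127.0.0.1:1080")  # placeholder address
    except AiohttpSocksError as error:
        # Raised only when aiohttp_socks is missing; fall back to a direct connection.
        print(f"Proxy support unavailable: {error}")
        connector = None
    async with ClientSession(connector=connector) as session:
        ...  # use the session as usual

asyncio.run(main())
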
diff --git a/g4f/typing.py b/g4f/typing.py
index a6a62e3f..fd4fd047 100644
--- a/g4f/typing.py
+++ b/g4f/typing.py
@@ -1,5 +1,5 @@
import sys
-from typing import Any, AsyncGenerator, Generator, NewType, Tuple, Union, List, Dict, Type, IO
+from typing import Any, AsyncGenerator, Generator, NewType, Tuple, Union, List, Dict, Type, IO, Optional
from PIL.Image import Image
if sys.version_info >= (3, 8):
@@ -22,6 +22,8 @@ __all__ = [
'List',
'Dict',
'Type',
+ 'IO',
+ 'Optional',
'TypedDict',
'SHA256',
'CreateResult',