author    H Lohaus <hlohaus@users.noreply.github.com>    2024-02-09 14:24:15 +0100
committer GitHub <noreply@github.com>    2024-02-09 14:24:15 +0100
commit    5c75972c50ac936cb6dc8e01cef9cdb08daa8ed7
tree      739dc1c020fcaff53d3f80630036b9aa2ac94f54    /g4f/Provider
parent    Merge pull request #1565 from hlohaus/gemini
Diffstat (limited to 'g4f/Provider')
-rw-r--r--    g4f/Provider/CreateImagesBing.py      94
-rw-r--r--    g4f/Provider/GptChatly.py              1
-rw-r--r--    g4f/Provider/Hashnode.py               2
-rw-r--r--    g4f/Provider/Phind.py                  2
-rw-r--r--    g4f/Provider/__init__.py               2
-rw-r--r--    g4f/Provider/bing/create_images.py    90
-rw-r--r--    g4f/Provider/helper.py                99
-rw-r--r--    g4f/Provider/needs_auth/ThebApi.py     2
8 files changed, 107 insertions, 185 deletions
diff --git a/g4f/Provider/CreateImagesBing.py b/g4f/Provider/CreateImagesBing.py
new file mode 100644
index 00000000..4d045188
--- /dev/null
+++ b/g4f/Provider/CreateImagesBing.py
@@ -0,0 +1,94 @@
+from __future__ import annotations
+
+import asyncio
+import time
+import os
+from typing import Generator
+
+from ..cookies import get_cookies
+from ..webdriver import WebDriver, get_driver_cookies, get_browser
+from ..image import ImageResponse
+from ..errors import MissingRequirementsError, MissingAuthError
+from .bing.create_images import BING_URL, create_images, create_session
+
+BING_URL = "https://www.bing.com"
+TIMEOUT_LOGIN = 1200
+
+def wait_for_login(driver: WebDriver, timeout: int = TIMEOUT_LOGIN) -> None:
+ """
+ Waits for the user to log in within a given timeout period.
+
+ Args:
+ driver (WebDriver): Webdriver for browser automation.
+ timeout (int): Maximum waiting time in seconds.
+
+ Raises:
+ RuntimeError: If the login process exceeds the timeout.
+ """
+ driver.get(f"{BING_URL}/")
+ start_time = time.time()
+ while not driver.get_cookie("_U"):
+ if time.time() - start_time > timeout:
+ raise RuntimeError("Timeout error")
+ time.sleep(0.5)
+
+def get_cookies_from_browser(proxy: str = None) -> dict[str, str]:
+ """
+ Retrieves cookies from the browser using webdriver.
+
+ Args:
+ proxy (str, optional): Proxy configuration.
+
+ Returns:
+ dict[str, str]: Retrieved cookies.
+ """
+ with get_browser(proxy=proxy) as driver:
+ wait_for_login(driver)
+ time.sleep(1)
+ return get_driver_cookies(driver)
+
+class CreateImagesBing:
+ """A class for creating images using Bing."""
+
+ def __init__(self, cookies: dict[str, str] = {}, proxy: str = None) -> None:
+ self.cookies = cookies
+ self.proxy = proxy
+
+ def create_completion(self, prompt: str) -> Generator[ImageResponse, None, None]:
+ """
+        Generator for creating image completions based on a prompt.
+
+        Args:
+            prompt (str): Prompt to generate images.
+
+        Yields:
+            Generator[str, None, None]: The final output as markdown formatted string with images.
+        """
+        cookies = self.cookies or get_cookies(".bing.com", False)
+        if "_U" not in cookies:
+            login_url = os.environ.get("G4F_LOGIN_URL")
+            if login_url:
+                yield f"Please login: [Bing]({login_url})\n\n"
+            try:
+                self.cookies = get_cookies_from_browser(self.proxy)
+            except MissingRequirementsError as e:
+                raise MissingAuthError(f'Missing "_U" cookie. {e}')
+        yield asyncio.run(self.create_async(prompt))
+
+    async def create_async(self, prompt: str) -> ImageResponse:
+        """
+        Asynchronously creates a markdown formatted string with images based on the prompt.
+
+        Args:
+            prompt (str): Prompt to generate images.
+
+        Returns:
+            str: Markdown formatted string with images.
+        """
+        cookies = self.cookies or get_cookies(".bing.com", False)
+        if "_U" not in cookies:
+            raise MissingAuthError('Missing "_U" cookie')
+        proxy = os.environ.get("G4F_PROXY")
+        async with create_session(cookies, proxy) as session:
+            images = await create_images(session, prompt, self.proxy)
+            return ImageResponse(images, prompt, {"preview": "{image}?w=200&h=200"})
\ No newline at end of file
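A minimal usage sketch of the new provider (illustrative only; it assumes the package layout above and a valid Bing "_U" cookie, either passed in explicitly or discoverable by the cookie/webdriver fallbacks):

    from g4f.Provider.CreateImagesBing import CreateImagesBing

    # Cookies are optional: without them the class falls back to get_cookies(".bing.com")
    # and, as a last resort, to an interactive webdriver login.
    service = CreateImagesBing(cookies={"_U": "<your Bing _U cookie>"})  # placeholder value

    # create_completion is a generator: it may first yield a login hint string,
    # then an ImageResponse whose markdown contains the generated image links.
    for chunk in service.create_completion("a watercolor lighthouse at dusk"):
        print(chunk)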
diff --git a/g4f/Provider/GptChatly.py b/g4f/Provider/GptChatly.py
index d98c2af4..9fb739a8 100644
--- a/g4f/Provider/GptChatly.py
+++ b/g4f/Provider/GptChatly.py
@@ -10,7 +10,6 @@ class GptChatly(AsyncProvider):
    working = True
    supports_message_history = True
    supports_gpt_35_turbo = True
-    supports_gpt_4 = True

    @classmethod
    async def create_async(
diff --git a/g4f/Provider/Hashnode.py b/g4f/Provider/Hashnode.py
index a287fa7c..7a0c2903 100644
--- a/g4f/Provider/Hashnode.py
+++ b/g4f/Provider/Hashnode.py
@@ -13,7 +13,7 @@ class SearchTypes():
class Hashnode(AsyncGeneratorProvider):
    url = "https://hashnode.com"
-    working = True
+    working = False
    supports_message_history = True
    supports_gpt_35_turbo = True
    _sources = []
diff --git a/g4f/Provider/Phind.py b/g4f/Provider/Phind.py
index a7fdbeca..746dcbcc 100644
--- a/g4f/Provider/Phind.py
+++ b/g4f/Provider/Phind.py
@@ -67,7 +67,7 @@ class Phind(AsyncGeneratorProvider):
                        if chunk.startswith(b'<PHIND_DONE/>'):
                            break
                        if chunk.startswith(b'<PHIND_BACKEND_ERROR>'):
-                            raise RuntimeError(f"Response: {chunk}")
+                            raise RuntimeError(f"Response: {chunk.decode()}")
                        if chunk.startswith(b'<PHIND_WEBRESULTS>') or chunk.startswith(b'<PHIND_FOLLOWUP>'):
                            pass
                        elif chunk.startswith(b"<PHIND_METADATA>") or chunk.startswith(b"<PHIND_INDICATOR>"):
diff --git a/g4f/Provider/__init__.py b/g4f/Provider/__init__.py
index 1b45b00d..7dbc1504 100644
--- a/g4f/Provider/__init__.py
+++ b/g4f/Provider/__init__.py
@@ -53,6 +53,8 @@ from .Vercel import Vercel
from .Ylokh import Ylokh
from .You import You
+from .CreateImagesBing import CreateImagesBing
+
import sys
__modules__: list = [
diff --git a/g4f/Provider/bing/create_images.py b/g4f/Provider/bing/create_images.py
index b31e9408..de4fd476 100644
--- a/g4f/Provider/bing/create_images.py
+++ b/g4f/Provider/bing/create_images.py
@@ -7,10 +7,9 @@ from __future__ import annotations
import asyncio
import time
import json
-import os
from aiohttp import ClientSession, BaseConnector
from urllib.parse import quote
-from typing import Generator, List, Dict
+from typing import List, Dict
try:
    from bs4 import BeautifulSoup
@@ -19,14 +18,11 @@ except ImportError:
    has_requirements = False
from ..create_images import CreateImagesProvider
-from ..helper import get_cookies, get_connector
-from ...webdriver import WebDriver, get_driver_cookies, get_browser
+from ..helper import get_connector
from ...base_provider import ProviderType
-from ...image import ImageResponse
-from ...errors import MissingRequirementsError, MissingAuthError
+from ...errors import MissingRequirementsError
BING_URL = "https://www.bing.com"
-TIMEOUT_LOGIN = 1200
TIMEOUT_IMAGE_CREATION = 300
ERRORS = [
"this prompt is being reviewed",
@@ -39,24 +35,6 @@ BAD_IMAGES = [
"https://r.bing.com/rp/TX9QuO3WzcCJz1uaaSwQAz39Kb0.jpg",
]
-def wait_for_login(driver: WebDriver, timeout: int = TIMEOUT_LOGIN) -> None:
- """
- Waits for the user to log in within a given timeout period.
-
- Args:
- driver (WebDriver): Webdriver for browser automation.
- timeout (int): Maximum waiting time in seconds.
-
- Raises:
- RuntimeError: If the login process exceeds the timeout.
- """
- driver.get(f"{BING_URL}/")
- start_time = time.time()
- while not driver.get_cookie("_U"):
- if time.time() - start_time > timeout:
- raise RuntimeError("Timeout error")
- time.sleep(0.5)
-
def create_session(cookies: Dict[str, str], proxy: str = None, connector: BaseConnector = None) -> ClientSession:
"""
Creates a new client session with specified cookies and headers.
@@ -170,67 +148,6 @@ def read_images(html_content: str) -> List[str]:
raise RuntimeError("No images found")
return images
-def get_cookies_from_browser(proxy: str = None) -> dict[str, str]:
- """
- Retrieves cookies from the browser using webdriver.
-
- Args:
- proxy (str, optional): Proxy configuration.
-
- Returns:
- dict[str, str]: Retrieved cookies.
- """
- with get_browser(proxy=proxy) as driver:
- wait_for_login(driver)
- time.sleep(1)
- return get_driver_cookies(driver)
-
-class CreateImagesBing:
- """A class for creating images using Bing."""
-
- def __init__(self, cookies: dict[str, str] = {}, proxy: str = None) -> None:
- self.cookies = cookies
- self.proxy = proxy
-
- def create_completion(self, prompt: str) -> Generator[ImageResponse, None, None]:
- """
- Generator for creating imagecompletion based on a prompt.
-
- Args:
- prompt (str): Prompt to generate images.
-
- Yields:
- Generator[str, None, None]: The final output as markdown formatted string with images.
- """
- cookies = self.cookies or get_cookies(".bing.com", False)
- if "_U" not in cookies:
- login_url = os.environ.get("G4F_LOGIN_URL")
- if login_url:
- yield f"Please login: [Bing]({login_url})\n\n"
- try:
- self.cookies = get_cookies_from_browser(self.proxy)
- except MissingRequirementsError as e:
- raise MissingAuthError(f'Missing "_U" cookie. {e}')
- yield asyncio.run(self.create_async(prompt))
-
- async def create_async(self, prompt: str) -> ImageResponse:
- """
- Asynchronously creates a markdown formatted string with images based on the prompt.
-
- Args:
- prompt (str): Prompt to generate images.
-
- Returns:
- str: Markdown formatted string with images.
- """
- cookies = self.cookies or get_cookies(".bing.com", False)
- if "_U" not in cookies:
- raise MissingAuthError('Missing "_U" cookie')
- proxy = os.environ.get("G4F_PROXY")
- async with create_session(cookies, proxy) as session:
- images = await create_images(session, prompt, self.proxy)
- return ImageResponse(images, prompt, {"preview": "{image}?w=200&h=200"})
-
def patch_provider(provider: ProviderType) -> CreateImagesProvider:
"""
Patches a provider to include image creation capabilities.
@@ -241,6 +158,7 @@ def patch_provider(provider: ProviderType) -> CreateImagesProvider:
Returns:
CreateImagesProvider: The patched provider with image creation capabilities.
"""
+ from ..CreateImagesBing import CreateImagesBing
service = CreateImagesBing()
return CreateImagesProvider(
provider,
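patch_provider now imports CreateImagesBing inside the function (presumably to avoid a circular import with g4f/Provider/__init__.py). A rough sketch of how a text provider might be wrapped, assuming the You provider exported above:

    from g4f.Provider import You
    from g4f.Provider.bing.create_images import patch_provider

    # Returns a CreateImagesProvider that routes image prompts to Bing
    # while delegating normal chat completions to the wrapped provider.
    patched_you = patch_provider(You)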
diff --git a/g4f/Provider/helper.py b/g4f/Provider/helper.py
index 2cab5e6f..35480255 100644
--- a/g4f/Provider/helper.py
+++ b/g4f/Provider/helper.py
@@ -1,104 +1,13 @@
from __future__ import annotations
-import os
import random
import secrets
import string
from aiohttp import BaseConnector
-try:
-    from platformdirs import user_config_dir
-    has_platformdirs = True
-except ImportError:
-    has_platformdirs = False
-try:
-    from browser_cookie3 import (
-        chrome, chromium, opera, opera_gx,
-        brave, edge, vivaldi, firefox,
-        _LinuxPasswordManager, BrowserCookieError
-    )
-    has_browser_cookie3 = True
-except ImportError:
-    has_browser_cookie3 = False
-
-from ..typing import Dict, Messages, Cookies, Optional
-from ..errors import MissingAiohttpSocksError, MissingRequirementsError
-from .. import debug
-
-# Global variable to store cookies
-_cookies: Dict[str, Cookies] = {}
-
-if has_browser_cookie3 and os.environ.get('DBUS_SESSION_BUS_ADDRESS') == "/dev/null":
-    _LinuxPasswordManager.get_password = lambda a, b: b"secret"
-
-def get_cookies(domain_name: str = '', raise_requirements_error: bool = True) -> Dict[str, str]:
- """
- Load cookies for a given domain from all supported browsers and cache the results.
-
- Args:
- domain_name (str): The domain for which to load cookies.
-
- Returns:
- Dict[str, str]: A dictionary of cookie names and values.
- """
- if domain_name in _cookies:
- return _cookies[domain_name]
-
- cookies = load_cookies_from_browsers(domain_name, raise_requirements_error)
- _cookies[domain_name] = cookies
- return cookies
-
-def set_cookies(domain_name: str, cookies: Cookies = None) -> None:
-    if cookies:
-        _cookies[domain_name] = cookies
-    elif domain_name in _cookies:
-        _cookies.pop(domain_name)
-
-def load_cookies_from_browsers(domain_name: str, raise_requirements_error: bool = True) -> Cookies:
- """
- Helper function to load cookies from various browsers.
-
- Args:
- domain_name (str): The domain for which to load cookies.
-
- Returns:
- Dict[str, str]: A dictionary of cookie names and values.
- """
- if not has_browser_cookie3:
- if raise_requirements_error:
- raise MissingRequirementsError('Install "browser_cookie3" package')
- return {}
- cookies = {}
- for cookie_fn in [_g4f, chrome, chromium, opera, opera_gx, brave, edge, vivaldi, firefox]:
- try:
- cookie_jar = cookie_fn(domain_name=domain_name)
- if len(cookie_jar) and debug.logging:
- print(f"Read cookies from {cookie_fn.__name__} for {domain_name}")
- for cookie in cookie_jar:
- if cookie.name not in cookies:
- cookies[cookie.name] = cookie.value
- except BrowserCookieError:
- pass
- except Exception as e:
- if debug.logging:
- print(f"Error reading cookies from {cookie_fn.__name__} for {domain_name}: {e}")
- return cookies
-
-def _g4f(domain_name: str) -> list:
- """
- Load cookies from the 'g4f' browser (if exists).
-
- Args:
- domain_name (str): The domain for which to load cookies.
-
- Returns:
- list: List of cookies.
- """
- if not has_platformdirs:
- return []
- user_data_dir = user_config_dir("g4f")
- cookie_file = os.path.join(user_data_dir, "Default", "Cookies")
- return [] if not os.path.exists(cookie_file) else chrome(cookie_file, domain_name)
+from ..typing import Messages, Optional
+from ..errors import MissingRequirementsError
+from ..cookies import get_cookies
def format_prompt(messages: Messages, add_special_tokens=False) -> str:
"""
@@ -149,5 +58,5 @@ def get_connector(connector: BaseConnector = None, proxy: str = None) -> Optiona
            from aiohttp_socks import ProxyConnector
            connector = ProxyConnector.from_url(proxy)
        except ImportError:
-            raise MissingAiohttpSocksError('Install "aiohttp_socks" package for proxy support')
+            raise MissingRequirementsError('Install "aiohttp_socks" package for proxy support')
    return connector
\ No newline at end of file
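For reference, a hedged sketch of using the surviving get_connector helper after this change (the proxy URL is a placeholder, and aiohttp_socks must be installed for proxy support):

    import asyncio
    from aiohttp import ClientSession
    from g4f.Provider.helper import get_connector

    async def main():
        # With a proxy URL, get_connector builds an aiohttp_socks ProxyConnector;
        # if aiohttp_socks is missing it raises MissingRequirementsError
        # (replacing the removed MissingAiohttpSocksError).
        connector = get_connector(proxy="socks5://127.0.0.1:9050")
        async with ClientSession(connector=connector) as session:
            async with session.get("https://www.bing.com") as response:
                print(response.status)

    asyncio.run(main())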
diff --git a/g4f/Provider/needs_auth/ThebApi.py b/g4f/Provider/needs_auth/ThebApi.py
index ea633243..1c7baf8d 100644
--- a/g4f/Provider/needs_auth/ThebApi.py
+++ b/g4f/Provider/needs_auth/ThebApi.py
@@ -43,7 +43,7 @@ class ThebApi(AbstractProvider, ProviderModelMixin):
        model: str,
        messages: Messages,
        stream: bool,
-        auth: str,
+        auth: str = None,
        proxy: str = None,
        **kwargs
    ) -> CreateResult:
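With auth now optional in the signature, the provider can be invoked without an explicit key; a sketch (model name and key are placeholders, and the backend will still reject requests it cannot authenticate):

    from g4f.Provider.needs_auth.ThebApi import ThebApi

    # auth may still be supplied, but the signature no longer requires it.
    for chunk in ThebApi.create_completion(
        model="gpt-3.5-turbo",                      # placeholder model id
        messages=[{"role": "user", "content": "Hello"}],
        stream=False,
        auth="sk-...",                              # optional now
    ):
        print(chunk, end="")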