path: root/g4f/Provider
Diffstat (limited to 'g4f/Provider')
-rw-r--r--  g4f/Provider/BingCreateImages.py       23
-rw-r--r--  g4f/Provider/DeepInfra.py               2
-rw-r--r--  g4f/Provider/ReplicateImage.py         96
-rw-r--r--  g4f/Provider/__init__.py                1
-rw-r--r--  g4f/Provider/bing/conversation.py       4
-rw-r--r--  g4f/Provider/bing/create_images.py      2
-rw-r--r--  g4f/Provider/unfinished/Replicate.py   78
7 files changed, 198 insertions(+), 8 deletions(-)
diff --git a/g4f/Provider/BingCreateImages.py b/g4f/Provider/BingCreateImages.py
index f9c4f3b3..51e06dc5 100644
--- a/g4f/Provider/BingCreateImages.py
+++ b/g4f/Provider/BingCreateImages.py
@@ -7,16 +7,33 @@ from typing import Iterator, Union
 from ..cookies import get_cookies
 from ..image import ImageResponse
 from ..errors import MissingRequirementsError, MissingAuthError
-from ..typing import Cookies
+from ..typing import AsyncResult, Messages, Cookies
+from .base_provider import AsyncGeneratorProvider, ProviderModelMixin
 from .bing.create_images import create_images, create_session, get_cookies_from_browser
 
-class BingCreateImages:
-    """A class for creating images using Bing."""
+class BingCreateImages(AsyncGeneratorProvider, ProviderModelMixin):
+    url = "https://www.bing.com/images/create"
+    working = True
 
     def __init__(self, cookies: Cookies = None, proxy: str = None) -> None:
         self.cookies: Cookies = cookies
         self.proxy: str = proxy
 
+    @classmethod
+    async def create_async_generator(
+        cls,
+        model: str,
+        messages: Messages,
+        api_key: str = None,
+        cookies: Cookies = None,
+        proxy: str = None,
+        **kwargs
+    ) -> AsyncResult:
+        if api_key is not None:
+            cookies = {"_U": api_key}
+        session = BingCreateImages(cookies, proxy)
+        yield await session.create_async(messages[-1]["content"])
+
     def create(self, prompt: str) -> Iterator[Union[ImageResponse, str]]:
         """
         Generator for creating image completions based on a prompt.
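
With this change BingCreateImages can be driven through the common async-generator provider interface, passing the Bing "_U" cookie value as api_key. A minimal usage sketch under that assumption (the cookie value below is a placeholder):

    import asyncio
    from g4f.Provider import BingCreateImages

    async def main():
        # The provider takes the prompt from the last message and yields one ImageResponse.
        async for item in BingCreateImages.create_async_generator(
            model="",  # no model selection is used by this provider
            messages=[{"role": "user", "content": "a watercolor fox"}],
            api_key="<your Bing _U cookie>",  # placeholder, not a real value
        ):
            print(item)

    asyncio.run(main())
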
diff --git a/g4f/Provider/DeepInfra.py b/g4f/Provider/DeepInfra.py
index 53c8d6b9..fb809b7f 100644
--- a/g4f/Provider/DeepInfra.py
+++ b/g4f/Provider/DeepInfra.py
@@ -35,7 +35,6 @@ class DeepInfra(Openai):
             'Accept-Encoding': 'gzip, deflate, br',
             'Accept-Language': 'en-US',
             'Connection': 'keep-alive',
-            'Content-Type': None,
             'Origin': 'https://deepinfra.com',
             'Referer': 'https://deepinfra.com/',
             'Sec-Fetch-Dest': 'empty',
@@ -43,7 +42,6 @@ class DeepInfra(Openai):
             'Sec-Fetch-Site': 'same-site',
             'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/119.0.0.0 Safari/537.36',
             'X-Deepinfra-Source': 'web-embed',
-            'Accept': None,
             'sec-ch-ua': '"Google Chrome";v="119", "Chromium";v="119", "Not?A_Brand";v="24"',
             'sec-ch-ua-mobile': '?0',
             'sec-ch-ua-platform': '"macOS"',
diff --git a/g4f/Provider/ReplicateImage.py b/g4f/Provider/ReplicateImage.py
new file mode 100644
index 00000000..3e423c1a
--- /dev/null
+++ b/g4f/Provider/ReplicateImage.py
@@ -0,0 +1,96 @@
+from __future__ import annotations
+
+import random
+import asyncio
+
+from .base_provider import AsyncGeneratorProvider, ProviderModelMixin
+from ..typing import AsyncResult, Messages
+from ..requests import StreamSession, raise_for_status
+from ..image import ImageResponse
+from ..errors import ResponseError
+
+class ReplicateImage(AsyncGeneratorProvider, ProviderModelMixin):
+    url = "https://replicate.com"
+    working = True
+    default_model = 'stability-ai/sdxl'
+    default_versions = [
+        "39ed52f2a78e934b3ba6e2a89f5b1c712de7dfea535525255b1aa35c5565e08b",
+        "2b017d9b67edd2ee1401238df49d75da53c523f36e363881e057f5dc3ed3c5b2"
+    ]
+
+    @classmethod
+    async def create_async_generator(
+        cls,
+        model: str,
+        messages: Messages,
+        **kwargs
+    ) -> AsyncResult:
+        yield await cls.create_async(messages[-1]["content"], model, **kwargs)
+
+    @classmethod
+    async def create_async(
+        cls,
+        prompt: str,
+        model: str,
+        api_key: str = None,
+        proxy: str = None,
+        timeout: int = 180,
+        version: str = None,
+        extra_data: dict = {},
+        **kwargs
+    ) -> ImageResponse:
+        headers = {
+            'Accept-Encoding': 'gzip, deflate, br',
+            'Accept-Language': 'en-US',
+            'Connection': 'keep-alive',
+            'Origin': cls.url,
+            'Referer': f'{cls.url}/',
+            'Sec-Fetch-Dest': 'empty',
+            'Sec-Fetch-Mode': 'cors',
+            'Sec-Fetch-Site': 'same-site',
+            'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/119.0.0.0 Safari/537.36',
+            'sec-ch-ua': '"Google Chrome";v="119", "Chromium";v="119", "Not?A_Brand";v="24"',
+            'sec-ch-ua-mobile': '?0',
+            'sec-ch-ua-platform': '"macOS"',
+        }
+        if version is None:
+            version = random.choice(cls.default_versions)
+        if api_key is not None:
+            headers["Authorization"] = f"Bearer {api_key}"
+        async with StreamSession(
+            proxies={"all": proxy},
+            headers=headers,
+            timeout=timeout
+        ) as session:
+            data = {
+                "input": {
+                    "prompt": prompt,
+                    **extra_data
+                },
+                "version": version
+            }
+            if api_key is None:
+                data["model"] = cls.get_model(model)
+                url = "https://homepage.replicate.com/api/prediction"
+            else:
+                url = "https://api.replicate.com/v1/predictions"
+            async with session.post(url, json=data) as response:
+                await raise_for_status(response)
+                result = await response.json()
+            if "id" not in result:
+                raise ResponseError(f"Invalid response: {result}")
+            while True:
+                if api_key is None:
+                    url = f"https://homepage.replicate.com/api/poll?id={result['id']}"
+                else:
+                    url = f"https://api.replicate.com/v1/predictions/{result['id']}"
+                async with session.get(url) as response:
+                    await raise_for_status(response)
+                    result = await response.json()
+                    if "status" not in result:
+                        raise ResponseError(f"Invalid response: {result}")
+                    if result["status"] == "succeeded":
+                        images = result['output']
+                        images = images[0] if len(images) == 1 else images
+                        return ImageResponse(images, prompt)
+                await asyncio.sleep(0.5)
\ No newline at end of file
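
ReplicateImage wraps Replicate's prediction API: it creates a prediction (against api.replicate.com when an API key is given, otherwise against the unauthenticated homepage endpoint) and polls until the status is "succeeded". A minimal calling sketch, assuming the unauthenticated endpoint remains reachable:

    import asyncio
    from g4f.Provider import ReplicateImage

    async def main():
        # Without api_key the provider uses homepage.replicate.com and picks
        # one of the default SDXL versions at random.
        image = await ReplicateImage.create_async(
            "an isometric voxel city at night",
            model="stability-ai/sdxl",
        )
        print(image)  # ImageResponse wrapping one or more output URLs

    asyncio.run(main())
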
diff --git a/g4f/Provider/__init__.py b/g4f/Provider/__init__.py
index 7a39d023..e7d1e4b3 100644
--- a/g4f/Provider/__init__.py
+++ b/g4f/Provider/__init__.py
@@ -38,6 +38,7 @@ from .Llama2 import Llama2
 from .Local import Local
 from .PerplexityLabs import PerplexityLabs
 from .Pi import Pi
+from .ReplicateImage import ReplicateImage
 from .Vercel import Vercel
 from .WhiteRabbitNeo import WhiteRabbitNeo
 from .You import You
diff --git a/g4f/Provider/bing/conversation.py b/g4f/Provider/bing/conversation.py
index e9cdfe31..ef48cd91 100644
--- a/g4f/Provider/bing/conversation.py
+++ b/g4f/Provider/bing/conversation.py
@@ -34,9 +34,9 @@ async def create_conversation(session: ClientSession, headers: dict, tone: str)
         Conversation: An instance representing the created conversation.
     """
     if tone == "Copilot":
-        url = "https://copilot.microsoft.com/turing/conversation/create?bundleVersion=1.1634.3-nodesign2"
+        url = "https://copilot.microsoft.com/turing/conversation/create?bundleVersion=1.1686.0"
     else:
-        url = "https://www.bing.com/turing/conversation/create?bundleVersion=1.1626.1"
+        url = "https://www.bing.com/turing/conversation/create?bundleVersion=1.1686.0"
     async with session.get(url, headers=headers) as response:
         if response.status == 404:
             raise RateLimitError("Response 404: Do less requests and reuse conversations")
diff --git a/g4f/Provider/bing/create_images.py b/g4f/Provider/bing/create_images.py
index cb357708..44303c21 100644
--- a/g4f/Provider/bing/create_images.py
+++ b/g4f/Provider/bing/create_images.py
@@ -151,7 +151,7 @@ async def create_images(session: ClientSession, prompt: str, proxy: str = None,
             if response.status != 200:
                 raise RuntimeError(f"Polling images failed. Code: {response.status}")
             text = await response.text()
-            if not text:
+            if not text or "GenerativeImagesStatusPage" in text:
                 await asyncio.sleep(1)
             else:
                 break
diff --git a/g4f/Provider/unfinished/Replicate.py b/g4f/Provider/unfinished/Replicate.py
new file mode 100644
index 00000000..aaaf31b3
--- /dev/null
+++ b/g4f/Provider/unfinished/Replicate.py
@@ -0,0 +1,78 @@
+from __future__ import annotations
+
+import asyncio
+
+from ..base_provider import AsyncGeneratorProvider, ProviderModelMixin
+from ..helper import format_prompt, filter_none
+from ...typing import AsyncResult, Messages
+from ...requests import StreamSession, raise_for_status
+from ...image import ImageResponse
+from ...errors import ResponseError, MissingAuthError
+
+class Replicate(AsyncGeneratorProvider, ProviderModelMixin):
+    url = "https://replicate.com"
+    working = True
+    default_model = "mistralai/mixtral-8x7b-instruct-v0.1"
+    api_base = "https://api.replicate.com/v1/models/"
+
+    @classmethod
+    async def create_async_generator(
+        cls,
+        model: str,
+        messages: Messages,
+        api_key: str = None,
+        proxy: str = None,
+        timeout: int = 180,
+        system_prompt: str = None,
+        max_new_tokens: int = None,
+        temperature: float = None,
+        top_p: float = None,
+        top_k: float = None,
+        stop: list = None,
+        extra_data: dict = {},
+        headers: dict = {},
+        **kwargs
+    ) -> AsyncResult:
+        model = cls.get_model(model)
+        if api_key is None:
+            raise MissingAuthError("api_key is missing")
+        headers["Authorization"] = f"Bearer {api_key}"
+        async with StreamSession(
+            proxies={"all": proxy},
+            headers=headers,
+            timeout=timeout
+        ) as session:
+            data = {
+                "stream": True,
+                "input": {
+                    "prompt": format_prompt(messages),
+                    **filter_none(
+                        system_prompt=system_prompt,
+                        max_new_tokens=max_new_tokens,
+                        temperature=temperature,
+                        top_p=top_p,
+                        top_k=top_k,
+                        stop_sequences=",".join(stop) if stop else None
+                    ),
+                    **extra_data
+                },
+            }
+            url = f"{cls.api_base.rstrip('/')}/{model}/predictions"
+            async with session.post(url, json=data) as response:
+                await raise_for_status(response)
+                result = await response.json()
+                if "id" not in result:
+                    raise ResponseError(f"Invalid response: {result}")
+            async with session.get(result["urls"]["stream"], headers={"Accept": "text/event-stream"}) as response:
+                await raise_for_status(response)
+                event = None
+                async for line in response.iter_lines():
+                    if line.startswith(b"event: "):
+                        event = line[7:]
+                    elif event == b"output":
+                        if line.startswith(b"data: "):
+                            yield line[6:].decode()
+                        elif not line.startswith(b"id: "):
+                            continue#yield "+"+line.decode()
+                    elif event == b"done":
+                        break
\ No newline at end of file